├── .github └── workflows │ └── test.yml ├── .gitignore ├── CHANGES.rst ├── LICENSE.rst ├── Makefile ├── README.rst ├── mx.ini ├── pyproject.toml └── src └── node ├── base.py ├── behaviors ├── __init__.py ├── adopt.py ├── alias.py ├── attributes.py ├── cache.py ├── common.py ├── constraints.py ├── context.py ├── events.py ├── factories.py ├── fallback.py ├── filter.py ├── lifecycle.py ├── mapping.py ├── node.py ├── nodespace.py ├── order.py ├── reference.py ├── schema.py ├── sequence.py └── storage.py ├── compat.py ├── events.py ├── interfaces.py ├── locking.py ├── schema ├── __init__.py ├── fields.py ├── scope.py └── serializer.py ├── serializer.py ├── testing ├── __init__.py ├── base.py ├── env.py ├── fullmapping.py └── profiling.py ├── tests ├── __init__.py ├── test_adopt.py ├── test_alias.py ├── test_attributes.py ├── test_base.py ├── test_cache.py ├── test_common.py ├── test_constraints.py ├── test_context.py ├── test_events.py ├── test_factories.py ├── test_fallback.py ├── test_filter.py ├── test_lifecycle.py ├── test_locking.py ├── test_mapping.py ├── test_node.py ├── test_nodespace.py ├── test_order.py ├── test_reference.py ├── test_schema.py ├── test_sequence.py ├── test_serializer.py ├── test_storage.py ├── test_testing.py ├── test_tests.py └── test_utils.py └── utils.py /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Python Tests 2 | 3 | on: [push] 4 | 5 | jobs: 6 | test: 7 | runs-on: ubuntu-latest 8 | 9 | strategy: 10 | matrix: 11 | python-version: 12 | - "3.10" 13 | - "3.11" 14 | - "3.12" 15 | - "3.13" 16 | - "3.14" 17 | 18 | steps: 19 | - uses: actions/checkout@v5 20 | 21 | - name: Set up Python ${{ matrix.python-version }} 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | 26 | - name: Show Python version 27 | run: python -c "import sys; print(sys.version)" 28 | 29 | - name: Run tests an collect code coverage 30 | run: make 
coverage 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info/ 2 | *.pyc 3 | /.coverage 4 | /.mxmake 5 | /.ruff_cache 6 | /build 7 | /constraints-mxdev.txt 8 | /dist/ 9 | /htmlcov/ 10 | /requirements-mxdev.txt 11 | /sources/ 12 | /venv/ 13 | -------------------------------------------------------------------------------- /LICENSE.rst: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | Copyright (c) 2009-2021, BlueDynamics Alliance, Austria 5 | Copyright (c) 2021-2025, Node Contributors 6 | All rights reserved. 7 | 8 | Redistribution and use in source and binary forms, with or without 9 | modification, are permitted provided that the following conditions are met: 10 | 11 | * Redistributions of source code must retain the above copyright notice, this 12 | list of conditions and the following disclaimer. 13 | 14 | * Redistributions in binary form must reproduce the above copyright notice, this 15 | list of conditions and the following disclaimer in the documentation and/or 16 | other materials provided with the distribution. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 25 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /mx.ini: -------------------------------------------------------------------------------- 1 | [settings] 2 | threads = 5 3 | 4 | # conestack git URLs 5 | cs = https://github.com/conestack 6 | cs_push = git@github.com:conestack 7 | 8 | # checkout source packages 9 | checkout_packages = true 10 | 11 | # feature branch to checkout 12 | feature_branch = master 13 | 14 | # main package 15 | main-package = -e .[test] 16 | 17 | mxmake-test-runner = pytest 18 | # fixed dependency package versions 19 | version-overrides = 20 | pyramid==2.0.2 21 | 22 | # templates and environment 23 | mxmake-templates = 24 | run-tests 25 | run-coverage 26 | 27 | mxmake-test-path = src 28 | mxmake-source-path = src/node 29 | 30 | [mxmake-env] 31 | TESTRUN_MARKER = 1 32 | 33 | [mxmake-run-tests] 34 | environment = env 35 | 36 | [mxmake-run-coverage] 37 | environment = env 38 | 39 | ############################################################################### 40 | # base packages 41 | ############################################################################### 42 | 43 | [odict] 44 | use = ${settings:checkout_packages} 45 | url = ${settings:cs}/odict.git 46 | pushurl = ${settings:cs_push}/odict.git 47 | branch = ${settings:feature_branch} 48 | mxmake-test-path = tests 49 | mxmake-source-path = src/odict 50 | 51 | [plumber] 52 | use = ${settings:checkout_packages} 53 | url 
= ${settings:cs}/plumber.git 54 | pushurl = ${settings:cs_push}/plumber.git 55 | branch = ${settings:feature_branch} 56 | extras = test 57 | mxmake-test-path = tests 58 | mxmake-source-path = src/plumber 59 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-fancy-pypi-readme"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "node" 7 | version = "2.0.0.dev0" 8 | description = "Building data structures as node trees" 9 | dynamic = ["readme"] 10 | requires-python = ">=3.10" 11 | license = {text = "Simplified BSD"} 12 | authors = [{name = "Node Contributors", email = "dev@conestack.org"}] 13 | keywords = ["node", "tree", "fullmapping", "dict"] 14 | classifiers = [ 15 | "Development Status :: 5 - Production/Stable", 16 | "License :: OSI Approved :: BSD License", 17 | "Intended Audience :: Developers", 18 | "Operating System :: OS Independent", 19 | "Topic :: Software Development", 20 | "Programming Language :: Python", 21 | "Programming Language :: Python :: 3.10", 22 | "Programming Language :: Python :: 3.11", 23 | "Programming Language :: Python :: 3.12", 24 | "Programming Language :: Python :: 3.13", 25 | "Programming Language :: Python :: 3.14", 26 | ] 27 | dependencies = [ 28 | "odict>=1.9.0", 29 | "plumber>=1.5", 30 | "zope.component", 31 | "zope.deferredimport", 32 | "zope.deprecation", 33 | "zope.lifecycleevent", 34 | ] 35 | 36 | [project.optional-dependencies] 37 | test = [ 38 | "pytest", 39 | ] 40 | 41 | [project.urls] 42 | Homepage = "http://github.com/conestack/node" 43 | 44 | [tool.hatch.metadata.hooks.fancy-pypi-readme] 45 | content-type = "text/x-rst" 46 | fragments = [ 47 | {path = "README.rst"}, 48 | {text = "\n\n"}, 49 | {path = "CHANGES.rst"}, 50 | {text = "\n\n"}, 51 | {path = "LICENSE.rst"}, 52 | ] 53 | 54 | [tool.hatch.build.targets.sdist] 55 | 
exclude = [ 56 | "/.github/", 57 | "/Makefile", 58 | "/mx.ini", 59 | ] 60 | 61 | [tool.hatch.build.targets.wheel] 62 | packages = ["src/node"] 63 | 64 | [tool.pytest.ini_options] 65 | consider_namespace_packages = true 66 | addopts = ["--import-mode=importlib"] 67 | pythonpath = "src" 68 | 69 | [tool.zest-releaser] 70 | create-wheel = true 71 | -------------------------------------------------------------------------------- /src/node/base.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import AsAttrAccess 2 | from node.behaviors import Attributes 3 | from node.behaviors import DefaultInit 4 | from node.behaviors import DictStorage 5 | from node.behaviors import ListStorage 6 | from node.behaviors import MappingAdopt 7 | from node.behaviors import MappingConstraints 8 | from node.behaviors import MappingNode 9 | from node.behaviors import MappingReference 10 | from node.behaviors import Nodespaces 11 | from node.behaviors import OdictStorage 12 | from node.behaviors import MappingOrder 13 | from node.behaviors import SequenceAdopt 14 | from node.behaviors import SequenceConstraints 15 | from node.behaviors import SequenceNode 16 | from plumber import plumbing 17 | 18 | 19 | @plumbing( 20 | MappingAdopt, 21 | MappingNode) 22 | class AbstractNode(object): 23 | pass 24 | 25 | 26 | @plumbing( 27 | MappingConstraints, 28 | MappingAdopt, 29 | AsAttrAccess, 30 | DefaultInit, 31 | MappingNode, 32 | DictStorage) 33 | class BaseNode(object): 34 | """Base node, not ordered. 35 | 36 | Uses ``dict`` as mapping implementation. 37 | """ 38 | 39 | 40 | @plumbing( 41 | MappingConstraints, 42 | MappingAdopt, 43 | AsAttrAccess, 44 | DefaultInit, 45 | MappingNode, 46 | OdictStorage) 47 | class OrderedNode(object): 48 | """Ordered node. 49 | 50 | Uses ``odict`` as mapping implementation. 
51 | """ 52 | 53 | 54 | @plumbing( 55 | SequenceConstraints, 56 | SequenceAdopt, 57 | DefaultInit, 58 | SequenceNode, 59 | ListStorage) 60 | class ListNode(object): 61 | """Sequence node. 62 | 63 | Uses ``list`` as sequence implementation. 64 | """ 65 | 66 | 67 | ############################################################################### 68 | # B/C from zodict. 69 | # XXX: will be removed soon 70 | ############################################################################### 71 | 72 | @plumbing( 73 | MappingConstraints, 74 | Nodespaces, 75 | MappingAdopt, 76 | Attributes, 77 | MappingReference, 78 | MappingOrder, 79 | AsAttrAccess, 80 | DefaultInit, 81 | MappingNode, 82 | OdictStorage) 83 | class Node(object): 84 | """A node with original functionality from zodict.node.Node.""" 85 | 86 | 87 | @plumbing( 88 | MappingConstraints, 89 | Nodespaces, 90 | MappingAdopt, 91 | Attributes, 92 | MappingReference, 93 | MappingOrder, 94 | AsAttrAccess, 95 | DefaultInit, 96 | MappingNode, 97 | OdictStorage) 98 | class AttributedNode(object): 99 | """A node with original functionality from zodict.node.AttributedNode.""" 100 | -------------------------------------------------------------------------------- /src/node/behaviors/__init__.py: -------------------------------------------------------------------------------- 1 | from .adopt import MappingAdopt # noqa 2 | from .adopt import SequenceAdopt # noqa 3 | from .alias import Alias # noqa 4 | from .attributes import Attributes # noqa 5 | from .attributes import NodeAttributes # noqa 6 | from .cache import Cache # noqa 7 | from .cache import Invalidate # noqa 8 | from .cache import VolatileStorageInvalidate # noqa 9 | from .common import AsAttrAccess # noqa 10 | from .common import UnicodeAware # noqa 11 | from .common import UUIDAware # noqa 12 | from .constraints import MappingConstraints # noqa 13 | from .constraints import SequenceConstraints # noqa 14 | from .context import BoundContext # noqa 15 | from .events import 
EventAttribute # noqa 16 | from .events import Events # noqa 17 | from .events import suppress_events # noqa 18 | from .events import UnknownEvent # noqa 19 | from .factories import ChildFactory # noqa 20 | from .factories import FixedChildren # noqa 21 | from .factories import WildcardFactory # noqa 22 | from .fallback import Fallback # noqa 23 | from .filter import MappingFilter # noqa 24 | from .filter import SequenceFilter # noqa 25 | from .lifecycle import AttributesLifecycle # noqa 26 | from .lifecycle import Lifecycle # noqa 27 | from .lifecycle import suppress_lifecycle_events # noqa 28 | from .mapping import ClonableMapping # noqa 29 | from .mapping import EnumerableMapping # noqa 30 | from .mapping import ExtendedReadMapping # noqa 31 | from .mapping import ExtendedWriteMapping # noqa 32 | from .mapping import FullMapping # noqa 33 | from .mapping import ItemMapping # noqa 34 | from .mapping import IterableMapping # noqa 35 | from .mapping import Mapping # noqa 36 | from .mapping import MappingNode # noqa 37 | from .mapping import ReadMapping # noqa 38 | from .mapping import WriteMapping # noqa 39 | from .node import ContentishNode # noqa 40 | from .node import DefaultInit # noqa 41 | from .node import Node # noqa 42 | from .node import NodeInit # noqa 43 | from .nodespace import Nodespaces # noqa 44 | from .order import MappingOrder # noqa 45 | from .order import SequenceOrder # noqa 46 | from .reference import IndexViolationError # noqa 47 | from .reference import MappingReference # noqa 48 | from .reference import NodeIndex # noqa 49 | from .reference import NodeReference # noqa 50 | from .reference import SequenceReference # noqa 51 | from .schema import Schema # noqa 52 | from .schema import SchemaAsAttributes # noqa 53 | from .schema import SchemaAttributes # noqa 54 | from .schema import SchemaProperties # noqa 55 | from .sequence import MutableSequence # noqa 56 | from .sequence import Sequence # noqa 57 | from .sequence import SequenceNode # noqa 
58 | from .storage import DictStorage # noqa 59 | from .storage import ListStorage # noqa 60 | from .storage import MappingStorage # noqa 61 | from .storage import OdictStorage # noqa 62 | from .storage import SequenceStorage # noqa 63 | from zope.deferredimport import deprecated 64 | 65 | 66 | # B/C 2022-02-16 -> node.behaviors.Adopt 67 | deprecated( 68 | '``Adopt`` has been renamed to ``MappingAdopt``. Please fix your import', 69 | Adopt='node.behaviors.adopt:MappingAdopt', 70 | ) 71 | # B/C 2022-02-22 -> node.behaviors.NodeChildValidate 72 | deprecated( 73 | '``NodeChildValidate`` has been renamed to ``MappingConstraints``. ' 74 | 'Please fix your import', 75 | NodeChildValidate='node.behaviors.constraints:MappingConstraints', 76 | ) 77 | # B/C 2022-02-14 -> node.behaviors.Nodify 78 | deprecated( 79 | '``Nodify`` has been renamed to ``MappingNode``. Please fix your import', 80 | Nodify='node.behaviors.mapping:MappingNode', 81 | ) 82 | # B/C 2022-11-22 -> node.behaviors.Order 83 | deprecated( 84 | '``Order`` has been renamed to ``MappingOrder``. Please fix your import', 85 | Order='node.behaviors.order:MappingOrder', 86 | ) 87 | # B/C 2022-05-06 -> node.behaviors.Reference 88 | deprecated( 89 | '``Reference`` has been renamed to ``MappingReference``. ' 90 | 'Please fix your import', 91 | Reference='node.behaviors.reference:MappingReference', 92 | ) 93 | # B/C 2022-02-14 -> node.behaviors.Storage 94 | deprecated( 95 | '``Storage`` has been renamed to ``MappingStorage``. 
Please fix your import', 96 | Storage='node.behaviors.storage:MappingStorage', 97 | ) 98 | -------------------------------------------------------------------------------- /src/node/behaviors/adopt.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from contextlib import contextmanager 3 | from node.interfaces import IMappingAdopt 4 | from node.interfaces import INode 5 | from node.interfaces import ISequenceAdopt 6 | from plumber import Behavior 7 | from plumber import plumb 8 | from zope.interface import implementer 9 | 10 | 11 | @contextmanager 12 | def adopt_node(name, parent, value): 13 | """Context manager for setting name and parent on node. If exception 14 | occurs, name and parent gets reverted to original values. 15 | """ 16 | # Only care about adoption if we have a node. 17 | if not INode.providedBy(value): 18 | yield 19 | return 20 | # Save old __parent__ and __name__ to restore if something goes wrong. 21 | old_name = value.__name__ 22 | old_parent = value.__parent__ 23 | value.__name__ = name 24 | value.__parent__ = parent 25 | try: 26 | yield 27 | except Exception: 28 | value.__name__ = old_name 29 | value.__parent__ = old_parent 30 | raise 31 | 32 | 33 | @implementer(IMappingAdopt) 34 | class MappingAdopt(Behavior): 35 | 36 | @plumb 37 | def __setitem__(next_, self, key, value): 38 | with adopt_node(key, self, value): 39 | next_(self, key, value) 40 | 41 | @plumb 42 | def setdefault(next_, self, key, default=None): 43 | # Reroute through ``__getitem__`` and ``__setitem__``, skipping 44 | # ``next_``. 
45 | try: 46 | return self[key] 47 | except KeyError: 48 | self[key] = default 49 | return default 50 | 51 | 52 | @implementer(ISequenceAdopt) 53 | class SequenceAdopt(Behavior): 54 | 55 | @plumb 56 | def __setitem__(next_, self, index, value): 57 | if type(index) is slice: 58 | raise NotImplementedError('No slice support.') 59 | with adopt_node(str(index), self, value): 60 | next_(self, index, value) 61 | 62 | @plumb 63 | def insert(next_, self, index, value): 64 | with adopt_node(str(index), self, value): 65 | next_(self, index, value) 66 | -------------------------------------------------------------------------------- /src/node/behaviors/alias.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IAlias 3 | from node.interfaces import IAliaser 4 | from node.utils import ReverseMapping 5 | from odict import odict 6 | from plumber import Behavior 7 | from plumber import default 8 | from plumber import plumb 9 | from zope.interface import implementer 10 | from zope.interface.common.mapping import IEnumerableMapping 11 | from zope.interface.common.mapping import IFullMapping 12 | 13 | 14 | @implementer(IAliaser, IFullMapping) 15 | class DictAliaser(odict): 16 | """Uses its own dictionary for aliasing. 17 | 18 | ``__getitem__`` -> unalias 19 | """ 20 | 21 | def __init__(self, data=(), strict=True): 22 | super(DictAliaser, self).__init__(data) 23 | self.strict = strict 24 | 25 | def alias(self, key): 26 | try: 27 | return ReverseMapping(self)[key] 28 | except KeyError as e: 29 | if not self.strict: 30 | return key 31 | raise e 32 | 33 | def unalias(self, aliased_key): 34 | try: 35 | return self[aliased_key] 36 | except KeyError as e: 37 | if not self.strict: 38 | return aliased_key 39 | raise e 40 | 41 | 42 | @implementer(IAliaser) 43 | class PrefixAliaser(object): 44 | """An aliaser that prefix all keys. 
45 | 46 | As it never raise KeyError it is not whitelisting. 47 | """ 48 | 49 | def __init__(self, prefix=None): 50 | self.prefix = prefix 51 | 52 | def alias(self, key): 53 | return (self.prefix or '') + key 54 | 55 | def unalias(self, prefixed_key): 56 | """Returns the real key for a prefixed_key.""" 57 | prefix = self.prefix or '' 58 | if not prefixed_key.startswith(prefix): 59 | raise KeyError(u"key '{}' does not match prefix '{}'".format( 60 | prefixed_key, 61 | prefix 62 | )) 63 | return prefixed_key[len(prefix):] 64 | 65 | 66 | @implementer(IAliaser) 67 | class SuffixAliaser(object): 68 | """An aliaser that suffixes all keys. 69 | 70 | As it never raise KeyError it is not whitelisting. 71 | """ 72 | 73 | def __init__(self, suffix=None): 74 | self.suffix = suffix 75 | 76 | def alias(self, key): 77 | return key + (self.suffix or '') 78 | 79 | def unalias(self, suffixed_key): 80 | """returns the real key for a suffixed_key.""" 81 | suffix = self.suffix or '' 82 | if not suffixed_key.endswith(suffix): 83 | raise KeyError(u"key '{}' does not match suffix '{}'".format( 84 | suffixed_key, 85 | suffix 86 | )) 87 | return suffixed_key[:-len(suffix)] 88 | 89 | 90 | @implementer(IAliaser) 91 | class AliaserChain(object): 92 | """A chain of aliasers. 
93 | 94 | chain = [aliaser1, aliaser2] 95 | chain.alias(key) == aliaser2.alias(aliaser1.alias(key)) 96 | chain.unalias(alias_key) == aliaser2.unalias(aliaser1.unalias(aliased_key)) 97 | """ 98 | 99 | def __init__(self, chain=None): 100 | self.chain = chain 101 | 102 | def alias(self, key): 103 | for aliaser in self.chain: 104 | key = aliaser.alias(key) 105 | return key 106 | 107 | def unalias(self, key): 108 | for aliaser in reversed(self.chain): 109 | key = aliaser.unalias(key) 110 | return key 111 | 112 | 113 | class PrefixSuffixAliaser(AliaserChain): 114 | """Prefixes and suffixes.""" 115 | 116 | def __init__(self, prefix=None, suffix=None): 117 | self.chain = (PrefixAliaser(prefix), SuffixAliaser(suffix)) 118 | 119 | 120 | @implementer(IAlias) 121 | class Alias(Behavior): 122 | aliaser = default(None) 123 | 124 | @plumb 125 | def __getitem__(next_, self, key): 126 | if self.aliaser: 127 | unaliased_key = self.aliaser.unalias(key) 128 | else: 129 | unaliased_key = key 130 | try: 131 | return next_(self, unaliased_key) 132 | except KeyError: 133 | raise KeyError(key) 134 | 135 | @plumb 136 | def __setitem__(next_, self, key, val): 137 | if self.aliaser: 138 | unaliased_key = self.aliaser.unalias(key) 139 | else: 140 | unaliased_key = key 141 | try: 142 | next_(self, unaliased_key, val) 143 | except KeyError: 144 | raise KeyError(key) 145 | 146 | @plumb 147 | def __delitem__(next_, self, key): 148 | if self.aliaser: 149 | unaliased_key = self.aliaser.unalias(key) 150 | else: 151 | unaliased_key = key 152 | try: 153 | next_(self, unaliased_key) 154 | except KeyError: 155 | raise KeyError(key) 156 | 157 | @plumb 158 | def __iter__(next_, self): 159 | for key in next_(self): 160 | try: 161 | if self.aliaser: 162 | yield self.aliaser.alias(key) 163 | else: 164 | yield key 165 | except KeyError: 166 | if IEnumerableMapping.providedBy(self.aliaser): 167 | # an enumerable aliaser whitelists, we skip non-listed keys 168 | continue 169 | raise 170 | 
-------------------------------------------------------------------------------- /src/node/behaviors/attributes.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.behaviors.adopt import MappingAdopt 3 | from node.behaviors.constraints import MappingConstraints 4 | from node.behaviors.mapping import MappingNode 5 | from node.behaviors.storage import OdictStorage 6 | from node.compat import IS_PY2 7 | from node.interfaces import IAttributes 8 | from node.interfaces import INodeAttributes 9 | from node.interfaces import INodespaces 10 | from node.utils import AttributeAccess 11 | from plumber import Behavior 12 | from plumber import default 13 | from plumber import finalize 14 | from plumber import plumbing 15 | from zope.interface import implementer 16 | 17 | 18 | @plumbing( 19 | MappingConstraints, 20 | MappingAdopt, 21 | MappingNode, 22 | OdictStorage) 23 | @implementer(INodeAttributes) 24 | class NodeAttributes(object): 25 | child_constraints = None 26 | 27 | def __init__(self, name=None, parent=None): 28 | self.__name__ = name 29 | self.__parent__ = parent 30 | self.context = parent # B/C 2011-01-31 31 | self._node = parent # B/C 2011-01-31 32 | 33 | def __repr__(self): 34 | name = self.parent.name.encode('ascii', 'replace') \ 35 | if IS_PY2 and isinstance(self.parent.name, unicode) \ 36 | else str(self.parent.name) 37 | return '<{} object \'{}\' at {}>'.format( 38 | self.__class__.__name__, 39 | name, 40 | hex(id(self))[:-1] 41 | ) 42 | 43 | 44 | @implementer(IAttributes) 45 | class Attributes(Behavior): 46 | attribute_access_for_attrs = default(False) 47 | attributes_factory = default(NodeAttributes) 48 | 49 | @finalize 50 | @property 51 | def attrs(self): 52 | if INodespaces.providedBy(self): 53 | try: 54 | attrs = self.nodespaces['__attrs__'] 55 | except KeyError: 56 | attrs = self.nodespaces['__attrs__'] = self.attributes_factory( 57 | name='__attrs__', 58 | 
parent=self 59 | ) 60 | else: 61 | try: 62 | attrs = self.__attrs__ 63 | except AttributeError: 64 | attrs = self.__attrs__ = self.attributes_factory( 65 | name='__attrs__', 66 | parent=self 67 | ) 68 | if self.attribute_access_for_attrs: 69 | return AttributeAccess(attrs) 70 | return attrs 71 | 72 | # B/C 73 | attributes = finalize(attrs) 74 | -------------------------------------------------------------------------------- /src/node/behaviors/cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.compat import IS_PY2 3 | from node.interfaces import ICache 4 | from node.interfaces import IInvalidate 5 | from node.utils import instance_property 6 | from plumber import Behavior 7 | from plumber import default 8 | from plumber import plumb 9 | from zope.interface import implementer 10 | 11 | 12 | def _keys(obj): 13 | """Compat function to always get keys as list. 14 | 15 | Currently used to avoid modification of dict while iterating in python 3. 16 | for key in _keys(ob): 17 | del ob[key] 18 | 19 | Can probably be replaced by just using. 20 | ob.clear() 21 | 22 | Business logic changes after whole stack has been migrated to python 3. 23 | """ 24 | return obj.keys() if IS_PY2 else list(obj.keys()) 25 | 26 | 27 | @implementer(IInvalidate) 28 | class Invalidate(Behavior): 29 | """Plumbing behavior for invalidation. 30 | 31 | This basic implementation assumes that nodes using this behavior are NOT 32 | storage related. It just uses ``self.__delitem__``. 33 | """ 34 | 35 | @default 36 | def invalidate(self, key=None): 37 | """Raise KeyError if child does not exist for key if given. 
38 | """ 39 | if key is not None: 40 | del self[key] 41 | else: 42 | for key in _keys(self): 43 | del self[key] 44 | 45 | 46 | @implementer(IInvalidate) 47 | class VolatileStorageInvalidate(Behavior): 48 | """Plumbing behavior for invalidating volatile storages like 49 | ``DictStorage`` or ``OdictStorage``. 50 | """ 51 | 52 | @default 53 | def invalidate(self, key=None): 54 | """Raise KeyError if child does not exist for key if given. 55 | """ 56 | storage = self.storage 57 | if key is not None: 58 | if key in _keys(self): 59 | try: 60 | del storage[key] 61 | except KeyError: 62 | pass # ignore, key is valid, but not on storage right now 63 | else: 64 | raise KeyError(key) 65 | else: 66 | for key in _keys(storage): 67 | del storage[key] 68 | 69 | 70 | @implementer(ICache) 71 | class Cache(Behavior): 72 | 73 | @default 74 | @instance_property 75 | def cache(self): 76 | """Default cache is a dict on self.""" 77 | return dict() 78 | 79 | @plumb 80 | def invalidate(next_, self, key=None): 81 | cache = self.cache 82 | if key is not None: 83 | try: 84 | del cache[key] 85 | except KeyError: 86 | pass 87 | else: 88 | for key in _keys(cache): 89 | del cache[key] 90 | next_(self, key=key) 91 | 92 | @plumb 93 | def __getitem__(next_, self, key): 94 | cache = self.cache 95 | try: 96 | return cache[key] 97 | except KeyError: 98 | cache[key] = next_(self, key) 99 | return cache[key] 100 | 101 | @plumb 102 | def __setitem__(next_, self, key, value): 103 | try: 104 | del self.cache[key] 105 | except KeyError: 106 | pass 107 | next_(self, key, value) 108 | 109 | @plumb 110 | def __delitem__(next_, self, key): 111 | try: 112 | del self.cache[key] 113 | except KeyError: 114 | pass 115 | next_(self, key) 116 | 117 | @plumb 118 | def __iter__(next_, self): 119 | # do not cache keys on default implementation. 
120 | return next_(self) 121 | -------------------------------------------------------------------------------- /src/node/behaviors/common.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IAsAttrAccess 3 | from node.interfaces import IUnicodeAware 4 | from node.interfaces import IUUIDAware 5 | from node.utils import AttributeAccess 6 | from node.utils import decode 7 | from plumber import Behavior 8 | from plumber import default 9 | from plumber import plumb 10 | from zope.interface import implementer 11 | import uuid 12 | 13 | 14 | @implementer(IAsAttrAccess) 15 | class AsAttrAccess(Behavior): 16 | 17 | @default 18 | def as_attribute_access(self): 19 | return AttributeAccess(self) 20 | 21 | 22 | @implementer(IUnicodeAware) 23 | class UnicodeAware(Behavior): 24 | # XXX: It feels here it would be nice to be able to get an instance of a 25 | # plumbing to configure the codec. 26 | 27 | @plumb 28 | def __delitem__(next_, self, key): 29 | if isinstance(key, str): 30 | key = decode(key) 31 | next_(self, key) 32 | 33 | @plumb 34 | def __getitem__(next_, self, key): 35 | if isinstance(key, str): 36 | key = decode(key) 37 | return next_(self, key) 38 | 39 | @plumb 40 | def __setitem__(next_, self, key, val): 41 | if isinstance(key, str): 42 | key = decode(key) 43 | if isinstance(val, str): 44 | val = decode(val) 45 | return next_(self, key, val) 46 | 47 | 48 | @implementer(IUUIDAware) 49 | class UUIDAware(Behavior): 50 | uuid = default(None) 51 | overwrite_recursiv_on_copy = default(True) 52 | 53 | @plumb 54 | def __init__(next_, self, *args, **kw): 55 | next_(self, *args, **kw) 56 | if not self.uuid: 57 | self.uuid = self.uuid_factory() 58 | 59 | @plumb 60 | def copy(next_, self): 61 | msg = 'Shallow copy useless on UUID aware node trees, use deepcopy.' 
62 | raise RuntimeError(msg) 63 | 64 | @plumb 65 | def deepcopy(next_, self): 66 | copied = next_(self) 67 | self.set_uuid_for(copied, True, self.overwrite_recursiv_on_copy) 68 | return copied 69 | 70 | @default 71 | def uuid_factory(self): 72 | return uuid.uuid4() 73 | 74 | @default 75 | def set_uuid_for(self, node, override=False, recursiv=False): 76 | if IUUIDAware.providedBy(node): 77 | if override or not node.uuid: 78 | node.uuid = self.uuid_factory() 79 | if recursiv: 80 | for child in node.values(): 81 | self.set_uuid_for(child, override, recursiv) 82 | -------------------------------------------------------------------------------- /src/node/behaviors/constraints.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IConstraints 3 | from node.interfaces import IMappingConstraints 4 | from node.interfaces import INode 5 | from node.interfaces import ISequenceConstraints 6 | from plumber import Behavior 7 | from plumber import default 8 | from plumber import plumb 9 | from zope.interface import implementer 10 | from zope.interface.interfaces import IInterface 11 | import warnings 12 | 13 | 14 | def child_constraints(node): 15 | if hasattr(node, 'allow_non_node_childs'): 16 | warnings.warn( 17 | '``allow_non_node_childs`` is deprecated, ' 18 | 'use ``child_constraints`` instead' 19 | ) 20 | constraints = tuple() if node.allow_non_node_childs else (INode,) 21 | elif hasattr(node, 'allow_non_node_children'): 22 | warnings.warn( 23 | '``allow_non_node_children`` is deprecated, ' 24 | 'use ``child_constraints`` instead' 25 | ) 26 | constraints = tuple() if node.allow_non_node_children else (INode,) 27 | else: 28 | constraints = node.child_constraints 29 | constraints = constraints if constraints else tuple() 30 | return constraints 31 | 32 | 33 | def check_constraints(node, value): 34 | for constraint in child_constraints(node): 35 | if 
IInterface.providedBy(constraint): 36 | if not constraint.providedBy(value): 37 | raise ValueError( 38 | 'Given value does not implement {}'.format(constraint) 39 | ) 40 | elif not isinstance(value, constraint): 41 | raise ValueError( 42 | 'Given value is no instance of {}'.format(constraint.__name__) 43 | ) 44 | 45 | 46 | @implementer(IConstraints) 47 | class Constraints(Behavior): 48 | child_constraints = default((INode,)) 49 | 50 | 51 | @implementer(IMappingConstraints) 52 | class MappingConstraints(Constraints): 53 | 54 | @plumb 55 | def __setitem__(next_, self, key, value): 56 | check_constraints(self, value) 57 | next_(self, key, value) 58 | 59 | 60 | @implementer(ISequenceConstraints) 61 | class SequenceConstraints(Constraints): 62 | 63 | @plumb 64 | def __setitem__(next_, self, index, value): 65 | check_constraints(self, value) 66 | next_(self, index, value) 67 | 68 | @plumb 69 | def insert(next_, self, index, value): 70 | check_constraints(self, value) 71 | next_(self, index, value) 72 | -------------------------------------------------------------------------------- /src/node/behaviors/context.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IBoundContext 3 | from plumber import Behavior 4 | from plumber import default 5 | from zope.interface import implementer 6 | from zope.interface.interfaces import IInterface 7 | import inspect 8 | 9 | 10 | @implementer(IBoundContext) 11 | class BoundContext(Behavior): 12 | __bound_context_interfaces__ = default(()) 13 | __bound_context_classes__ = default(()) 14 | 15 | @default 16 | @classmethod 17 | def bind_context(cls, *context): 18 | if cls.__bound_context_interfaces__ + cls.__bound_context_classes__: 19 | raise RuntimeError('Class context already bound') 20 | interfaces = [] 21 | classes = [] 22 | for ob in context: 23 | if not ob: 24 | continue 25 | if IInterface.providedBy(ob): 26 | 
interfaces.append(ob) 27 | elif inspect.isclass(ob): 28 | classes.append(ob) 29 | else: 30 | raise ValueError('Context is neither an interface nor a class') 31 | cls.__bound_context_interfaces__ = tuple(interfaces) 32 | cls.__bound_context_classes__ = tuple(classes) 33 | 34 | @default 35 | def context_matches(self, obj): 36 | interfaces = self.__bound_context_interfaces__ 37 | classes = self.__bound_context_classes__ 38 | if not interfaces + classes: 39 | return True 40 | for interface in interfaces: 41 | if interface.providedBy(obj): 42 | return True 43 | for class_ in classes: 44 | if isinstance(obj, class_): 45 | return True 46 | return False 47 | -------------------------------------------------------------------------------- /src/node/behaviors/factories.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.compat import lru_cache 3 | from node.interfaces import IChildFactory 4 | from node.interfaces import IFixedChildren 5 | from node.interfaces import IWildcardFactory 6 | from odict import odict 7 | from plumber import Behavior 8 | from plumber import default 9 | from plumber import finalize 10 | from plumber import override 11 | from plumber import plumb 12 | from zope.interface import implementer 13 | import fnmatch 14 | import itertools 15 | import warnings 16 | 17 | 18 | @implementer(IChildFactory) 19 | class ChildFactory(Behavior): 20 | factories = default(odict()) 21 | 22 | @override 23 | def __iter__(self): 24 | return self.factories.__iter__() 25 | 26 | iterkeys = override(__iter__) 27 | 28 | @plumb 29 | def __getitem__(next_, self, key): 30 | try: 31 | child = next_(self, key) 32 | except KeyError: 33 | factory = self.factories[key] 34 | try: 35 | child = factory(name=key, parent=self) 36 | except TypeError: 37 | warnings.warn( 38 | 'Calling child factory without arguments is deprecated. ' 39 | 'Adopt your factory to accept ``name`` and ``parent``.' 
40 | ) 41 | child = factory() 42 | self[key] = child 43 | return child 44 | 45 | 46 | @implementer(IFixedChildren) 47 | class FixedChildren(Behavior): 48 | """Behavior that initializes a fixed dictionary as children. 49 | 50 | The children are instantiated during __init__. 51 | """ 52 | factories = default(odict()) 53 | 54 | @plumb 55 | def __init__(next_, self, *args, **kw): 56 | next_(self, *args, **kw) 57 | self._children = odict() 58 | if hasattr(self, 'fixed_children_factories'): 59 | warnings.warn( 60 | '``fixed_children_factories`` is deprecated, ' 61 | 'use ``factories`` instead' 62 | ) 63 | factories = self.fixed_children_factories 64 | # This is a B/C interface contract violation hack. The interface 65 | # describes the factories as dict, but prior to node 1.1 the 66 | # implementation expected a tuple or list 67 | factories = ( 68 | odict(factories) if isinstance(factories, (list, tuple)) 69 | else factories 70 | ) 71 | else: 72 | factories = self.factories 73 | for key, factory in factories.items(): 74 | try: 75 | child = factory(name=key, parent=self) 76 | except TypeError: 77 | warnings.warn( 78 | 'Calling child factory without arguments is deprecated. ' 79 | 'Adopt your factoriy to accept ``name`` and ``parent``.' 
80 | ) 81 | child = factory() 82 | child.__name__ = key 83 | child.__parent__ = self 84 | self._children[key] = child 85 | 86 | @finalize 87 | def __setitem__(self, key, val): 88 | raise NotImplementedError('read-only') 89 | 90 | @finalize 91 | def __getitem__(self, key): 92 | return self._children[key] 93 | 94 | @finalize 95 | def __delitem__(self, key): 96 | raise NotImplementedError('read-only') 97 | 98 | @finalize 99 | def __iter__(self): 100 | return iter(self._children) 101 | 102 | 103 | @lru_cache(maxsize=32768) 104 | def _wildcard_pattern_occurrences(pattern): 105 | # count characters, asterisks, question_marks and sequences in pattern 106 | # a whole sequence (e.g. ``[a-z]``) counts as one character 107 | # https://man7.org/linux/man-pages/man7/glob.7.html 108 | chars = asterisks = question_marks = sequences = 0 109 | in_sequence = 0 110 | for char in pattern: 111 | if not in_sequence and char == '[': 112 | in_sequence += 1 113 | continue 114 | if in_sequence: 115 | if in_sequence < 2 or char != ']': 116 | in_sequence += 1 117 | continue 118 | else: 119 | in_sequence = 0 120 | sequences += 1 121 | if char == '*': 122 | asterisks += 1 123 | elif char == '?': 124 | question_marks += 1 125 | chars += 1 126 | if in_sequence: 127 | raise ValueError('Pattern contains non-closing sequence') 128 | return chars, asterisks, question_marks, sequences 129 | 130 | 131 | @lru_cache(maxsize=32768) 132 | def _wildcard_patterns_by_specificity(patterns): 133 | """Simple wildcard pattern weighting. 134 | 135 | Limitations: 136 | * Sequences are not weighted. 137 | * Max 100 sequences in pattern. 138 | * Max 100 question_marks in pattern. 139 | * Max 100 asterisks in pattern. 140 | 141 | If we want to have a proper weighting of all pattern aspects, we'd need to 142 | view patterns as finite state machines and count all required states 143 | necessary to resolve the pattern. This count can then be used as weight.
144 | https://github.com/adrian-thurston/ragel might be a starting point if we 145 | somewhen want to implement this. 146 | """ 147 | specificities = [ 148 | [], # patterns with no wildcards 149 | [], # patterns with sequences 150 | [], # patterns with sequences and question marks 151 | [] # patterns with all wildcards 152 | ] 153 | weights = dict() 154 | for pattern in patterns: 155 | ( 156 | chars, 157 | asterisks, 158 | question_marks, 159 | sequences 160 | ) = _wildcard_pattern_occurrences(pattern) 161 | weights[pattern] = ( 162 | 0 - chars + 163 | sequences / 1000000. + 164 | question_marks / 10000. + 165 | asterisks / 100. 166 | ) 167 | # patterns with no wildcards 168 | if asterisks + question_marks + sequences == 0: 169 | specificities[0].append(pattern) 170 | # patterns with sequences 171 | elif asterisks + question_marks == 0: 172 | specificities[1].append(pattern) 173 | # patterns with sequences and question marks 174 | elif asterisks == 0: 175 | specificities[2].append(pattern) 176 | # patterns with all wildcards 177 | else: 178 | specificities[3].append(pattern) 179 | return tuple(itertools.chain.from_iterable([ 180 | sorted(specificity, key=lambda x: weights[x]) 181 | for specificity in specificities 182 | ])) 183 | 184 | 185 | @implementer(IWildcardFactory) 186 | class WildcardFactory(Behavior): 187 | factories = default(odict()) 188 | pattern_weighting = default(True) 189 | 190 | @default 191 | def factory_for_pattern(self, name): 192 | factories = self.factories 193 | patterns = ( 194 | _wildcard_patterns_by_specificity(tuple(factories)) 195 | if self.pattern_weighting 196 | else factories 197 | ) 198 | for pattern in patterns: 199 | if fnmatch.fnmatchcase(name, pattern): 200 | return factories[pattern] 201 | -------------------------------------------------------------------------------- /src/node/behaviors/fallback.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 
from node.interfaces import IFallback 3 | from plumber import Behavior 4 | from plumber import default 5 | from plumber import plumb 6 | from zope.interface import implementer 7 | import threading 8 | 9 | 10 | _marker = dict() 11 | 12 | 13 | class fallback_processing(object): 14 | data = threading.local() 15 | data.processing = -1 16 | 17 | def __enter__(self): 18 | self.data.processing += 1 19 | return self.data.processing 20 | 21 | def __exit__(self, type, value, traceback): 22 | self.data.processing -= 1 23 | 24 | 25 | def _to_root(node, path, visited): 26 | """Traverse to root searching next fallback key. If no more parent, break. 27 | """ 28 | parent = node.__parent__ 29 | if parent is None: 30 | return _marker 31 | if not getattr(parent, 'fallback_key', _marker) or node in visited: 32 | return _to_root(parent, path=path, visited=visited) 33 | visited.update({node}) 34 | return _to_leaf(parent[parent.fallback_key], path=path, visited=visited) 35 | 36 | 37 | def _to_leaf(node, path, visited): 38 | """Traverse children, searching for fallback key.""" 39 | current = node 40 | for name in path[len(current.path):]: 41 | new_current = current.get(name, _marker) 42 | if new_current is _marker: 43 | return _to_root(current, path=path, visited=visited) 44 | current = new_current 45 | return current 46 | 47 | 48 | @implementer(IFallback) 49 | class Fallback(Behavior): 50 | fallback_key = default(_marker) 51 | 52 | @plumb 53 | def __getitem__(next_, self, key): 54 | """If key not found, look for fallback_key on parent(s) with the same 55 | subpath, take its children and look there, fall back to unvisited 56 | parents until no fallback left.
57 | """ 58 | try: 59 | value = next_(self, key) 60 | except KeyError: 61 | with fallback_processing() as count: 62 | if count > 0: 63 | raise 64 | value = _to_root(self, path=self.path + [key], visited=set()) 65 | if value is _marker: 66 | raise 67 | return value 68 | -------------------------------------------------------------------------------- /src/node/behaviors/filter.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IChildFilter 3 | from plumber import Behavior 4 | from plumber import default 5 | from zope.interface import implementer 6 | from zope.interface.interfaces import IInterface 7 | 8 | 9 | def filter_objects(objects, filter): 10 | if IInterface.providedBy(filter): 11 | return [ob for ob in objects if filter.providedBy(ob)] 12 | else: 13 | return [ob for ob in objects if isinstance(ob, filter)] 14 | 15 | 16 | @implementer(IChildFilter) 17 | class MappingFilter(Behavior): 18 | 19 | @default 20 | def filtered_children(self, filter): 21 | return filter_objects(self.values(), filter) 22 | 23 | 24 | @implementer(IChildFilter) 25 | class SequenceFilter(Behavior): 26 | 27 | @default 28 | def filtered_children(self, filter): 29 | return filter_objects(self, filter) 30 | -------------------------------------------------------------------------------- /src/node/behaviors/lifecycle.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from contextlib import contextmanager 3 | from node.events import NodeAddedEvent 4 | from node.events import NodeCreatedEvent 5 | from node.events import NodeDetachedEvent 6 | from node.events import NodeModifiedEvent 7 | from node.events import NodeRemovedEvent 8 | from node.interfaces import IAttributesLifecycle 9 | from node.interfaces import ILifecycle 10 | from plumber import Behavior 11 | from plumber import default 12 | from plumber 
import plumb 13 | from zope.component.event import objectEventNotify 14 | from zope.interface import implementer 15 | import threading 16 | 17 | 18 | class LifecycleContext(threading.local): 19 | suppress_events = False 20 | 21 | 22 | _lifecycle_context = LifecycleContext() 23 | 24 | 25 | @contextmanager 26 | def suppress_lifecycle_events(): 27 | """Context manager to suppress lifecycle events.""" 28 | _lifecycle_context.suppress_events = True 29 | try: 30 | yield 31 | finally: 32 | _lifecycle_context.suppress_events = False 33 | 34 | 35 | @implementer(ILifecycle) 36 | class Lifecycle(Behavior): 37 | 38 | events = default({ 39 | 'created': NodeCreatedEvent, 40 | 'added': NodeAddedEvent, 41 | 'modified': NodeModifiedEvent, 42 | 'removed': NodeRemovedEvent, 43 | 'detached': NodeDetachedEvent, 44 | }) 45 | 46 | @plumb 47 | def __init__(next_, self, *args, **kw): 48 | next_(self, *args, **kw) 49 | objectEventNotify(self.events['created'](self)) 50 | 51 | @plumb 52 | def __setitem__(next_, self, key, val): 53 | next_(self, key, val) 54 | if _lifecycle_context.suppress_events: 55 | return 56 | objectEventNotify(self.events['added']( 57 | val, 58 | newParent=self, 59 | newName=key 60 | )) 61 | 62 | @plumb 63 | def __delitem__(next_, self, key): 64 | delnode = self[key] 65 | next_(self, key) 66 | if _lifecycle_context.suppress_events: 67 | return 68 | objectEventNotify(self.events['removed']( 69 | delnode, 70 | oldParent=self, 71 | oldName=key 72 | )) 73 | 74 | @plumb 75 | def detach(next_, self, key): 76 | with suppress_lifecycle_events(): 77 | node = next_(self, key) 78 | objectEventNotify(self.events['detached']( 79 | node, 80 | oldParent=self, 81 | oldName=key 82 | )) 83 | return node 84 | 85 | 86 | @implementer(IAttributesLifecycle) 87 | class AttributesLifecycle(Behavior): 88 | 89 | @plumb 90 | def __setitem__(next_, self, key, val): 91 | next_(self, key, val) 92 | if _lifecycle_context.suppress_events: 93 | return 94 | 
objectEventNotify(self.__parent__.events['modified'](self.__parent__)) 95 | 96 | @plumb 97 | def __delitem__(next_, self, key): 98 | next_(self, key) 99 | if _lifecycle_context.suppress_events: 100 | return 101 | objectEventNotify(self.__parent__.events['modified'](self.__parent__)) 102 | -------------------------------------------------------------------------------- /src/node/behaviors/mapping.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.behaviors.node import ContentishNode 3 | from node.compat import ITER_FUNC 4 | from node.compat import iteritems 5 | from node.interfaces import IMappingNode 6 | from node.utils import UNSET 7 | from plumber import Behavior 8 | from plumber import default 9 | from plumber import override 10 | from plumber import plumb 11 | from zope.interface import implementer 12 | from zope.interface.common.mapping import IClonableMapping 13 | from zope.interface.common.mapping import IEnumerableMapping 14 | from zope.interface.common.mapping import IExtendedReadMapping 15 | from zope.interface.common.mapping import IExtendedWriteMapping 16 | from zope.interface.common.mapping import IFullMapping 17 | from zope.interface.common.mapping import IItemMapping 18 | from zope.interface.common.mapping import IIterableMapping 19 | from zope.interface.common.mapping import IMapping 20 | from zope.interface.common.mapping import IReadMapping 21 | from zope.interface.common.mapping import IWriteMapping 22 | import copy 23 | 24 | 25 | @implementer(IItemMapping) 26 | class ItemMapping(Behavior): 27 | """Simplest readable mapping object.""" 28 | 29 | @default 30 | def __getitem__(self, key): 31 | raise NotImplementedError 32 | 33 | 34 | @implementer(IReadMapping) 35 | class ReadMapping(ItemMapping): 36 | """Basic mapping interface.""" 37 | 38 | @default 39 | def get(self, key, default=None): 40 | """Uses ``__getitem__``.""" 41 | try: 42 | return self[key] 43 | 
except KeyError: 44 | return default 45 | 46 | @default 47 | def __contains__(self, key): 48 | """Uses ``__getitem__``. 49 | 50 | This should be overridden by storages (using override), where 51 | ``__getitem__`` is expensive. 52 | 53 | XXX: also catching the exception is expensive, so this should be 54 | overridden probably always. 55 | """ 56 | try: 57 | self[key] 58 | except KeyError: 59 | return False 60 | return True 61 | 62 | 63 | @implementer(IWriteMapping) 64 | class WriteMapping(Behavior): 65 | """Mapping methods for changing data.""" 66 | 67 | @default 68 | def __delitem__(self, key): 69 | raise NotImplementedError 70 | 71 | @default 72 | def __setitem__(self, key, value): 73 | raise NotImplementedError 74 | 75 | 76 | @implementer(IEnumerableMapping) 77 | class EnumerableMapping(ReadMapping): 78 | """Mapping objects whose items can be enumerated.""" 79 | 80 | @default 81 | def keys(self): 82 | """Uses ``__iter__``.""" 83 | return [x for x in self] 84 | 85 | @default 86 | def __iter__(self): 87 | raise NotImplementedError 88 | 89 | @default 90 | def values(self): 91 | """Uses ``__iter__`` and ``__getitem__``. 92 | 93 | return values in key order 94 | """ 95 | return [self[key] for key in self] 96 | 97 | @default 98 | def items(self): 99 | """Uses ``__iter__`` and ``__getitem__``. 100 | 101 | return items in key order 102 | """ 103 | return [(key, self[key]) for key in self] 104 | 105 | @default 106 | def __len__(self): 107 | """Uses ``keys``.""" 108 | return len(self.keys()) 109 | 110 | 111 | @implementer(IMapping) 112 | class Mapping(WriteMapping, EnumerableMapping): 113 | """Simple mapping interface.""" 114 | 115 | 116 | @implementer(IIterableMapping) 117 | class IterableMapping(EnumerableMapping): 118 | 119 | @default 120 | def iterkeys(self): 121 | """Uses ``__iter__``.""" 122 | return self.__iter__() 123 | 124 | @default 125 | def itervalues(self): 126 | """Uses ``__iter__`` and ``__getitem__``. 127 | 128 | Iterate values in key order.
129 | """ 130 | for key in self: 131 | yield self[key] 132 | 133 | @default 134 | def iteritems(self): 135 | """Uses ``__iter__`` and ``__getitem__``. 136 | 137 | Iterate items in key order. 138 | """ 139 | for key in self: 140 | yield key, self[key] 141 | 142 | 143 | @implementer(IClonableMapping) 144 | class ClonableMapping(Behavior): 145 | 146 | @default 147 | def copy(self): 148 | return copy.copy(self) 149 | 150 | @default 151 | def deepcopy(self): 152 | # not part of IClonableMapping API 153 | return copy.deepcopy(self) 154 | 155 | 156 | @implementer(IExtendedReadMapping) 157 | class ExtendedReadMapping(IterableMapping): 158 | 159 | @default 160 | def has_key(self, key): 161 | """Uses ``__iter__``.""" 162 | return key in self 163 | 164 | 165 | @implementer(IExtendedWriteMapping) 166 | class ExtendedWriteMapping(WriteMapping): 167 | 168 | @default 169 | def clear(self): 170 | """Works only if together with EnumerableMapping.""" 171 | for key in self.keys(): 172 | del self[key] 173 | 174 | @default 175 | def update(self, *args, **kw): 176 | if len(args) > 1: 177 | msg = 'At most one positional argument, not: {}.'.format(len(args)) 178 | raise TypeError(msg) 179 | if args: 180 | data = args[0] 181 | if hasattr(data, ITER_FUNC): 182 | data = iteritems(data) 183 | for key, val in data: 184 | self[key] = val 185 | for key, val in iteritems(kw): 186 | self[key] = val 187 | 188 | @default 189 | def setdefault(self, key, default=None): 190 | """Works only if together with ReadMapping.""" 191 | try: 192 | return self[key] 193 | except KeyError: 194 | self[key] = default 195 | return default 196 | 197 | @default 198 | def pop(self, key, default=UNSET): 199 | """Works only if together with ReadMapping.""" 200 | try: 201 | val = self[key] 202 | del self[key] 203 | except KeyError: 204 | if default is UNSET: 205 | raise 206 | val = default 207 | return val 208 | 209 | @default 210 | def popitem(self): 211 | """Works only if together with IterableMapping.""" 212 | for key 
in reversed(self.keys()): 213 | val = self[key] 214 | del self[key] 215 | return key, val 216 | raise KeyError('popitem(): mapping is empty') 217 | 218 | 219 | @implementer(IFullMapping) 220 | class FullMapping(ExtendedReadMapping, 221 | ExtendedWriteMapping, 222 | ClonableMapping, 223 | Mapping): 224 | """Provides defaults for IFullMapping 225 | 226 | NotImplementedError is raised by defaults for: 227 | - ``__delitem__`` 228 | - ``__getitem__`` 229 | - ``__iter__`` 230 | - ``__setitem__`` 231 | """ 232 | 233 | 234 | @implementer(IMappingNode) 235 | class MappingNode(ContentishNode, FullMapping): 236 | 237 | @plumb 238 | def copy(next_, self): 239 | new = next_(self) 240 | new.__name__ = self.__name__ 241 | new.__parent__ = self.__parent__ 242 | return new 243 | 244 | @override 245 | def filtereditervalues(self, interface): 246 | for val in self.itervalues(): 247 | if interface.providedBy(val): 248 | yield val 249 | 250 | @override 251 | def filteredvalues(self, interface): 252 | return [val for val in self.filtereditervalues(interface)] 253 | 254 | # B/C 2010-12-23 255 | filtereditems = override(filtereditervalues) 256 | -------------------------------------------------------------------------------- /src/node/behaviors/node.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.compat import IS_PY2 3 | from node.interfaces import IContentishNode 4 | from node.interfaces import IDefaultInit 5 | from node.interfaces import IMappingNode 6 | from node.interfaces import INode 7 | from node.interfaces import INodeInit 8 | from node.interfaces import IOrdered 9 | from node.interfaces import ISchemaProperties 10 | from node.interfaces import ISequenceNode 11 | from node.utils import LocationIterator 12 | from node.utils import safe_decode 13 | from plumber import Behavior 14 | from plumber import default 15 | from plumber import override 16 | from plumber import plumb 17 | from 
zope.interface import implementer 18 | from zope.interface.interfaces import IInterface 19 | 20 | 21 | @implementer(IDefaultInit) 22 | class DefaultInit(Behavior): 23 | 24 | @override 25 | def __init__(self, name=None, parent=None): 26 | self.__name__ = name 27 | self.__parent__ = parent 28 | 29 | 30 | @implementer(INodeInit) 31 | class NodeInit(Behavior): 32 | 33 | @plumb 34 | def __init__(next_, self, *args, **kwargs): 35 | self.__name__ = kwargs.pop('name', None) 36 | self.__parent__ = kwargs.pop('parent', None) 37 | next_(self, *args, **kwargs) 38 | 39 | 40 | @implementer(INode) 41 | class Node(Behavior): 42 | __name__ = default(None) 43 | __parent__ = default(None) 44 | 45 | @override 46 | @property 47 | def name(self): 48 | return self.__name__ 49 | 50 | @override 51 | @property 52 | def parent(self): 53 | return self.__parent__ 54 | 55 | @override 56 | @property 57 | def path(self): 58 | path = [parent.name for parent in LocationIterator(self)] 59 | path.reverse() 60 | return path 61 | 62 | @override 63 | @property 64 | def root(self): 65 | root = None 66 | for parent in LocationIterator(self): 67 | root = parent 68 | return root 69 | 70 | @override 71 | def acquire(self, interface): 72 | node = self.parent 73 | while node: 74 | if ( 75 | ( 76 | IInterface.providedBy(interface) 77 | and interface.providedBy(node) 78 | ) 79 | or isinstance(node, interface) 80 | ): 81 | return node 82 | node = node.parent 83 | 84 | @default 85 | def __nonzero__(self): 86 | return True 87 | 88 | __bool__ = default(__nonzero__) 89 | 90 | @override 91 | def __repr__(self): 92 | class_name = self.__class__.__name__ 93 | name = self.name.encode('ascii', 'replace') \ 94 | if IS_PY2 and isinstance(self.name, unicode) \ 95 | else str(self.name) 96 | return '<{} object \'{}\' at {}>'.format( 97 | class_name, 98 | name, 99 | hex(id(self))[:-1] 100 | ) 101 | 102 | __str__ = override(__repr__) 103 | 104 | @override 105 | @property 106 | def noderepr(self): 107 | """``noderepr`` is used in 
``treerepr``. 108 | 109 | Thus, we can overwrite it in subclass and return any debug information 110 | we need while ``__repr__`` is an enhanced standard object 111 | representation, also used as ``__str__`` on nodes. 112 | """ 113 | class_name = self.__class__ 114 | name = self.name.encode('ascii', 'replace') \ 115 | if IS_PY2 and isinstance(self.name, unicode) \ 116 | else str(self.name) 117 | return str(class_name) + ': ' + name[name.find(':') + 1:] 118 | 119 | @override 120 | def treerepr(self, indent=0, prefix=' '): 121 | res = '{}{}\n'.format(indent * prefix, self.noderepr) 122 | children = list() 123 | schema_members = set() 124 | if ISchemaProperties.providedBy(self): 125 | def collect_schema_members(cls): 126 | for schema_member in getattr(cls, '__schema_members__', []): 127 | schema_members.add(schema_member) 128 | for base in cls.__bases__: 129 | collect_schema_members(base) 130 | collect_schema_members(self.__class__) 131 | children += sorted([ 132 | (name, getattr(self, name)) 133 | for name in schema_members 134 | ], key=lambda x: x[0]) 135 | if IMappingNode.providedBy(self): 136 | items = list() 137 | for key in self: 138 | try: 139 | value = self[key] 140 | except Exception as e: 141 | value = repr(e) 142 | items.append((key, value)) 143 | sorted_items = ( 144 | items 145 | if IOrdered.providedBy(self) 146 | else sorted(items, key=lambda x: safe_decode(x[0])) 147 | ) 148 | for item in sorted_items: 149 | if item[0] not in schema_members: 150 | children.append(item) 151 | elif ISequenceNode.providedBy(self): 152 | children += [(index, value) for index, value in enumerate(self)] 153 | for name, value in children: 154 | if INode.providedBy(value): 155 | res += value.treerepr(indent=indent + 2, prefix=prefix) 156 | else: 157 | res += '{}{}: {}\n'.format( 158 | (indent + 2) * prefix, 159 | name, 160 | repr(value) 161 | ) 162 | return res 163 | 164 | @override 165 | def printtree(self): 166 | print(self.treerepr()) # pragma: no cover 167 | 168 | 169 | 
@implementer(IContentishNode) 170 | class ContentishNode(Node): 171 | 172 | @override 173 | def detach(self, name): 174 | node = self[name] 175 | del self[name] 176 | node.__parent__ = None 177 | return node 178 | -------------------------------------------------------------------------------- /src/node/behaviors/nodespace.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.compat import STR_TYPE 3 | from node.interfaces import INodespaces 4 | from odict import odict 5 | from plumber import Behavior 6 | from plumber import default 7 | from plumber import finalize 8 | from plumber import plumb 9 | from zope.interface import implementer 10 | 11 | 12 | @implementer(INodespaces) 13 | class Nodespaces(Behavior): 14 | _nodespaces = default(None) 15 | 16 | @finalize 17 | @property 18 | def nodespaces(self): 19 | """A storage and general way to access our nodespaces. 20 | 21 | An ``AttributedNode`` uses this to store the ``attrs`` nodespace i.e. 
22 | """ 23 | if self._nodespaces is None: 24 | self._nodespaces = odict() 25 | self._nodespaces['__children__'] = self 26 | return self._nodespaces 27 | 28 | @plumb 29 | def __getitem__(next_, self, key): 30 | # blend in our nodespaces as children, with name ____ 31 | # isinstance check is required because odict tries to get item possibly 32 | # with ``_nil`` key, which is actually an object 33 | if isinstance(key, STR_TYPE) \ 34 | and key.startswith('__') \ 35 | and key.endswith('__'): 36 | # a reserved child key mapped to the nodespace behind 37 | # nodespaces[key], nodespaces is an odict 38 | return self.nodespaces[key] 39 | return next_(self, key) 40 | 41 | @plumb 42 | def __setitem__(next_, self, key, val): 43 | # blend in our nodespaces as children, with name ____ 44 | if key.startswith('__') and key.endswith('__'): 45 | # a reserved child key mapped to the nodespace behind 46 | # nodespaces[key], nodespaces is an odict 47 | val.__name__ = key 48 | val.__parent__ = self 49 | self.nodespaces[key] = val 50 | # index checks below must not happen for other nodespace. 
51 | return 52 | next_(self, key, val) 53 | 54 | @plumb 55 | def __delitem__(next_, self, key): 56 | # blend in our nodespaces as children, with name ____ 57 | if key.startswith('__') and key.endswith('__'): 58 | # a reserved child key mapped to the nodespace behind 59 | # nodespaces[key], nodespaces is an odict 60 | del self.nodespaces[key] 61 | return 62 | next_(self, key) 63 | -------------------------------------------------------------------------------- /src/node/behaviors/order.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IMappingOrder 3 | from node.interfaces import INode 4 | from node.interfaces import ISequenceOrder 5 | from plumber import Behavior 6 | from plumber import override 7 | from zope.interface import implementer 8 | 9 | 10 | @implementer(IMappingOrder) 11 | class MappingOrder(Behavior): 12 | 13 | @override 14 | @property 15 | def first_key(self): 16 | return self.storage.first_key 17 | 18 | @override 19 | @property 20 | def last_key(self): 21 | return self.storage.last_key 22 | 23 | @override 24 | def next_key(self, key): 25 | return self.storage.next_key(key) 26 | 27 | @override 28 | def prev_key(self, key): 29 | return self.storage.prev_key(key) 30 | 31 | @override 32 | def swap(self, node_a, node_b): 33 | name_a = node_a.name if INode.providedBy(node_a) else node_a 34 | name_b = node_b.name if INode.providedBy(node_b) else node_b 35 | self.storage.swap(name_a, name_b) 36 | 37 | @override 38 | def insertbefore(self, newnode, refnode): 39 | self._validateinsertion(newnode) 40 | newnode_name = newnode.name 41 | refnode_name = refnode.name if INode.providedBy(refnode) else refnode 42 | try: 43 | self.storage[refnode_name] 44 | self[newnode_name] = newnode 45 | self.storage.movebefore(refnode_name, newnode_name) 46 | except KeyError: 47 | raise ValueError('Given reference node not child of self.') 48 | 49 | @override 50 | def 
insertafter(self, newnode, refnode): 51 | self._validateinsertion(newnode) 52 | newnode_name = newnode.name 53 | refnode_name = refnode.name if INode.providedBy(refnode) else refnode 54 | try: 55 | self.storage[refnode_name] 56 | self[newnode_name] = newnode 57 | self.storage.moveafter(refnode_name, newnode_name) 58 | except KeyError: 59 | raise ValueError('Given reference node not child of self.') 60 | 61 | @override 62 | def insertfirst(self, newnode): 63 | self._validateinsertion(newnode) 64 | newnode_name = newnode.name 65 | self[newnode_name] = newnode 66 | self.storage.movefirst(newnode_name) 67 | 68 | @override 69 | def insertlast(self, newnode): 70 | self._validateinsertion(newnode) 71 | newnode_name = newnode.name 72 | self[newnode_name] = newnode 73 | self.storage.movelast(newnode_name) 74 | 75 | @override 76 | def movebefore(self, movenode, refnode): 77 | movenode_name = movenode.name if INode.providedBy(movenode) else movenode 78 | refnode_name = refnode.name if INode.providedBy(refnode) else refnode 79 | self.storage.movebefore(refnode_name, movenode_name) 80 | 81 | @override 82 | def moveafter(self, movenode, refnode): 83 | movenode_name = movenode.name if INode.providedBy(movenode) else movenode 84 | refnode_name = refnode.name if INode.providedBy(refnode) else refnode 85 | self.storage.moveafter(refnode_name, movenode_name) 86 | 87 | @override 88 | def movefirst(self, movenode): 89 | movenode_name = movenode.name if INode.providedBy(movenode) else movenode 90 | self.storage.movefirst(movenode_name) 91 | 92 | @override 93 | def movelast(self, movenode): 94 | movenode_name = movenode.name if INode.providedBy(movenode) else movenode 95 | self.storage.movelast(movenode_name) 96 | 97 | @override 98 | def _validateinsertion(self, node): 99 | name = node.name 100 | if name is None: 101 | raise ValueError('Given node has no __name__ set.') 102 | if name in self.storage: 103 | msg = 'Tree already contains node with name {}'.format(name) 104 | raise 
KeyError(msg) 105 | 106 | 107 | @implementer(ISequenceOrder) 108 | class SequenceOrder(Behavior): 109 | 110 | @override 111 | @property 112 | def first_index(self): 113 | if not self.storage: 114 | raise IndexError('Sequence is empty') 115 | return 0 116 | 117 | @override 118 | @property 119 | def last_index(self): 120 | if not self.storage: 121 | raise IndexError('Sequence is empty') 122 | return len(self.storage) - 1 123 | 124 | @override 125 | def next_index(self, index): 126 | index += 1 127 | if index > self.last_index: 128 | raise IndexError('No next index') 129 | return index 130 | 131 | @override 132 | def prev_index(self, index): 133 | index -= 1 134 | if index < self.first_index: 135 | raise IndexError('No previous index') 136 | return index 137 | 138 | @override 139 | def swap(self, node_a, node_b): 140 | index_a = self._lookup_node_index(node_a) 141 | index_b = self._lookup_node_index(node_b) 142 | storage = self.storage 143 | storage[index_a], storage[index_b] = storage[index_b], storage[index_a] 144 | self._update_indices() 145 | 146 | @override 147 | def insertbefore(self, newnode, refnode): 148 | if newnode in self: 149 | raise ValueError('Given node already child of self.') 150 | ref_index = self._lookup_node_index(refnode) 151 | self.insert(ref_index, newnode) 152 | 153 | @override 154 | def insertafter(self, newnode, refnode): 155 | if newnode in self: 156 | raise ValueError('Given node already child of self.') 157 | ref_index = self._lookup_node_index(refnode) 158 | self.insert(ref_index + 1, newnode) 159 | 160 | @override 161 | def insertfirst(self, newnode): 162 | if newnode in self: 163 | raise ValueError('Given node already child of self.') 164 | self.insert(0, newnode) 165 | 166 | @override 167 | def insertlast(self, newnode): 168 | if newnode in self: 169 | raise ValueError('Given node already child of self.') 170 | self.append(newnode) 171 | 172 | @override 173 | def movebefore(self, movenode, refnode): 174 | move_index = 
self._lookup_node_index(movenode) 175 | ref_index = self._lookup_node_index(refnode) 176 | storage = self.storage 177 | move_val = storage[move_index] 178 | storage.insert(ref_index, move_val) 179 | if ref_index > move_index: 180 | del storage[move_index] 181 | else: 182 | del storage[move_index + 1] 183 | self._update_indices() 184 | 185 | @override 186 | def moveafter(self, movenode, refnode): 187 | move_index = self._lookup_node_index(movenode) 188 | ref_index = self._lookup_node_index(refnode) 189 | storage = self.storage 190 | move_val = storage[move_index] 191 | storage.insert(ref_index + 1, move_val) 192 | if ref_index > move_index: 193 | del storage[move_index] 194 | else: 195 | del storage[move_index + 1] 196 | self._update_indices() 197 | 198 | @override 199 | def movefirst(self, movenode): 200 | move_index = self._lookup_node_index(movenode) 201 | storage = self.storage 202 | move_val = storage[move_index] 203 | del storage[move_index] 204 | storage.insert(0, move_val) 205 | self._update_indices() 206 | 207 | @override 208 | def movelast(self, movenode): 209 | move_index = self._lookup_node_index(movenode) 210 | storage = self.storage 211 | move_val = storage[move_index] 212 | del storage[move_index] 213 | storage.append(move_val) 214 | self._update_indices() 215 | 216 | @override 217 | def _lookup_node_index(self, node): 218 | storage = self.storage 219 | try: 220 | if INode.providedBy(node): 221 | index = int(node.name) 222 | else: 223 | index = int(node) 224 | if index < 0 or index + 1 > len(storage): 225 | raise ValueError() 226 | except (ValueError, TypeError): 227 | raise ValueError('Given reference node not child of self.') 228 | return index 229 | -------------------------------------------------------------------------------- /src/node/behaviors/reference.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from node.interfaces import IMappingNode 3 | from 
class IndexViolationError(ValueError):
    """Raised when a uuid (or a set of uuids) collides with uuids already
    contained in a node's reference index.
    """

    def __init__(self, message, colliding=None):
        """Create index violation error.

        :param message: The error message.
        :param colliding: Optional iterable of colliding uuids as integers.
        """
        super(IndexViolationError, self).__init__(message)
        self.message = message
        # ``None`` sentinel instead of the former mutable default ``[]``.
        colliding = colliding if colliding is not None else []
        self.colliding = [uuid.UUID(int=iuuid) for iuuid in colliding]

    def __repr__(self):
        return 'Index Violation: {}\n * {}'.format(
            self.message,
            '\n * '.join([str(uuid_) for uuid_ in self.colliding])
        )
    @default
    def _init_reference_index(self):
        """Build a fresh uuid index for this node and its subtree.

        Creates a new index dict mapping integer uuids to nodes, registers
        self, and recursively registers all referencable children. Every
        child is additionally rewired to share the newly created index dict.
        """
        index = self._index = {int(self.uuid): self}
        def _init_children(node):
            for child in node._referencable_child_nodes:
                index[int(child.uuid)] = child
                # child now shares the tree-wide index
                child._index = index
                _init_children(child)
        _init_children(self)
class ContentishNodeReference(NodeReference):
    """Reference index handling shared by mapping and sequence nodes."""

    @plumb
    def __delitem__(next_, self, name):
        """Remove child and drop its subtree from the reference index."""
        # works on mapping and sequence nodes
        # fail immediately if name does not exist
        value = self[name]
        self._reduce_reference_index(value)
        next_(self, name)

    @plumb
    def detach(next_, self, key):
        """Detach child at ``key``; the detached node gets its own
        standalone reference index.
        """
        node = next_(self, key)
        node._init_reference_index()
        return node

    @default
    def _overwrite_reference_index(self, name, value):
        """Replace the index entries of the existing child at ``name`` by
        the entries of ``value``.

        If updating with ``value`` raises an ``IndexViolationError``, the
        existing child's entries are restored before re-raising.
        """
        existing = self.storage[name]
        self._reduce_reference_index(existing)
        try:
            self._update_reference_index(value)
        except IndexViolationError as e:
            if INodeReference.providedBy(existing):
                # roll back: re-register the previously removed entries
                existing._init_reference_index()
                self._update_reference_index(existing)
            raise e
    @plumb
    def __getitem__(next_, self, name):
        """Return child value, deserializing it if a schema field is
        defined for ``name``. Falls back to the field default if the key
        is missing.
        """
        field = self.schema.get(name)
        if not field:
            return next_(self, name)
        with scope_context(field, name, self):
            try:
                # NOTE: a KeyError raised by ``field.deserialize`` itself is
                # also mapped to the field default, not only a missing key.
                return field.deserialize(next_(self, name))
            except KeyError:
                return field.default
@plumbing(MappingNode, Schema)
@implementer(INodeAttributes)
class SchemaAttributes(object):
    """Mapping node exposing the parent's schema fields as attributes.

    Reads and writes are delegated to ``parent.storage``; only names
    contained in the parent's schema are accepted. Serialization and
    validation are handled by the plumbed ``Schema`` behavior.
    """

    def __init__(self, name=None, parent=None):
        # parent must provide ``schema`` and ``storage``
        self.__name__ = name
        self.__parent__ = parent
        self.schema = parent.schema

    def __setitem__(self, name, value):
        if name not in self.schema:
            raise KeyError(name)
        self.parent.storage[name] = value

    def __getitem__(self, name):
        if name not in self.schema:
            raise KeyError(name)
        return self.parent.storage[name]

    def __delitem__(self, name):
        if name not in self.schema:
            raise KeyError(name)
        del self.parent.storage[name]

    def __iter__(self):
        # iteration order follows the schema, not the storage
        return iter(self.schema)
@plumber.metaclasshook
def schema_properties_metclass_hook(cls, name, bases, dct):
    """Plumber metaclass hook handling proper post initialization of
    ``SchemaProperty`` instances on plumbing classes.

    Replaces every class member holding a ``node.schema.Field`` instance
    by a ``SchemaProperty`` descriptor and records the member names on
    ``cls.__schema_members__``.

    NOTE(review): ``metclass`` looks like a typo for ``metaclass``;
    renaming would change the module-level API, so the name is kept.
    """
    # only act on classes implementing ISchemaProperties
    if not ISchemaProperties.implementedBy(cls):
        return
    members = cls.__schema_members__ = list()
    for key, val in dct.items():
        if isinstance(val, Field):
            members.append(key)
            setattr(cls, key, SchemaProperty(key, val))
172 | """ 173 | self.name = name 174 | self.field = field 175 | 176 | def __get__(self, obj, type_=None): 177 | """Get field value. 178 | 179 | :param obj: The related object. 180 | :param type_: The related object type. Not used. 181 | :return: If property gets accessed on class directly, field default 182 | value is returned. Otherwise read raw value from related object and 183 | return deserialized value. If related object not holds a value by 184 | field name, default value gets returned. 185 | """ 186 | field = self.field 187 | if obj is None: 188 | return field.default 189 | name = self.name 190 | with scope_context(field, name, obj): 191 | try: 192 | with _property_access(name): 193 | return field.deserialize(obj[name]) 194 | except KeyError: 195 | return field.default 196 | 197 | def __set__(self, obj, value): 198 | """Set field value. 199 | 200 | :param obj: The related object. 201 | :param value: The field value to set. If value is ``UNSET``, it gets 202 | deleted from related object. Otherwise validate given value and 203 | serialize it on related object. 204 | """ 205 | name = self.name 206 | if value is UNSET: 207 | with _property_access(name): 208 | del obj[name] 209 | return 210 | field = self.field 211 | with scope_context(field, name, self): 212 | field.validate(value) 213 | with _property_access(name): 214 | obj[name] = field.serialize(value) 215 | 216 | def __delete__(self, obj): 217 | """Delete field value from related object. 218 | 219 | :param obj: The related object. 220 | """ 221 | name = self.name 222 | with _property_access(name): 223 | del obj[name] 224 | 225 | 226 | @implementer(ISchemaProperties) 227 | class SchemaProperties(Behavior): 228 | """Plumbing behavior to provide schema fields as class properties. 
229 | 230 | If a class gets plumbed with this behavior, all members which are an 231 | instance of ``node.schema.Field`` get replaced by a 232 | ``node.behaviors.SchemaProperty`` instance, which provides access to this 233 | object's data while taking validation and serialization into account. 234 | 235 | A class using this behavior must be a mapping type and at least implement 236 | ``__getitem__``, ``__setitem__`` and ``__delitem__``. 237 | 238 | Example usage: 239 | 240 | .. code-block:: python 241 | 242 | from node import schema 243 | from node.behaviors import SchemaProperties 244 | from node.utils import UNSET 245 | from plumber import plumbing 246 | 247 | @plumbing(SchemaProperties) 248 | class ObjectWithSchemaProperties(dict): 249 | title = schema.Str(default=u'No Title') 250 | description = schema.Str() 251 | 252 | obj = ObjectWithSchemaProperties() 253 | 254 | # values not set yet, defaults are returned. 255 | assert(obj.title == u'No Title') 256 | assert(obj.description is UNSET) 257 | assert(list(obj.keys()) == []) 258 | 259 | # when setting values, the get set on the mapping. 260 | obj.title = u'Title' 261 | obj.description = u'Description' 262 | assert(obj['title'] == u'Title') 263 | assert(obj['description'] == u'Description') 264 | 265 | # when setting values with UNSET, value gets deleted from mapping. 
    @plumb
    def __setitem__(next_, self, name, value):
        """Deny writing schema property names via item access.

        Raises ``KeyError`` unless the access originates from the
        ``SchemaProperty`` descriptor itself (marked via
        ``_property_access``).
        """
        if _schema_property.name != name and name in self.__schema_members__:
            raise KeyError('{} is a schema property'.format(name))
        next_(self, name, value)
def copy_func(f):
    """Copy function.

    Creates a new function object sharing ``f``'s code, globals, defaults
    and closure, so behaviors can reuse e.g. ABC implementations without
    sharing the function object itself.

    Based on http://stackoverflow.com/a/6528148/190597 (Glenn Maynard)

    :param f: The function to copy.
    :return: New function object equivalent to ``f``.
    """
    fn = types.FunctionType(
        f.func_code if IS_PY2 else f.__code__,
        f.func_globals if IS_PY2 else f.__globals__,
        name=f.func_name if IS_PY2 else f.__name__,
        argdefs=f.func_defaults if IS_PY2 else f.__defaults__,
        closure=f.func_closure if IS_PY2 else f.__closure__
    )
    # copy metadata (__doc__, __dict__, __module__, ...) onto the new function
    fn = functools.update_wrapper(fn, f)
    if not IS_PY2:  # pragma: no cover
        # keyword-only defaults are not covered by FunctionType/update_wrapper
        fn.__kwdefaults__ = f.__kwdefaults__
    return fn
@implementer(ISequenceNode)
class SequenceNode(ContentishNode, MutableSequence):
    """Mutable sequence node behavior.

    Coerces non-slice indices to ``int`` and keeps the ``__name__`` of
    child nodes in sync with their position after structural changes.
    """

    @override
    def __index__(self):
        # a sequence child's position is stored as its stringified name
        try:
            return int(self.__name__)
        except (TypeError, ValueError):
            raise IndexError('Node not member of a sequence node')

    @plumb
    def __getitem__(next_, self, index):
        # slices pass through untouched, anything else is coerced to int
        if type(index) is not slice:
            index = int(index)
        return next_(self, index)

    @plumb
    def __setitem__(next_, self, index, value):
        if type(index) is not slice:
            index = int(index)
        next_(self, index, value)

    @plumb
    def __delitem__(next_, self, index):
        if type(index) is not slice:
            index = int(index)
        next_(self, index)
        # renumber remaining children after removal
        self._update_indices()

    @plumb
    def insert(next_, self, index, value):
        next_(self, int(index), value)
        self._update_indices()

    @plumb
    def detach(next_, self, index):
        node = next_(self, int(index))
        self._update_indices()
        return node

    @default
    def _update_indices(self):
        # write each child's position into its ``__name__``
        for index, value in enumerate(self):
            if INode.providedBy(value):
                value.__name__ = str(index)
@implementer(ISequenceStorage)
class SequenceStorage(Behavior):
    """Abstract sequence storage behavior.

    Delegates all sequence operations to ``self.storage``.
    """

    @default
    @property
    def storage(self):
        # subclasses must provide the concrete sequence, see ``ListStorage``
        msg = 'Abstract ``SequenceStorage`` does not implement ``storage``'
        raise NotImplementedError(msg)

    @override
    def __len__(self):
        return len(self.storage)

    @override
    def __getitem__(self, index):
        return self.storage[index]

    @override
    def __setitem__(self, index, value):
        self.storage[index] = value

    @override
    def __delitem__(self, index):
        del self.storage[index]

    @override
    def insert(self, index, value):
        self.storage.insert(index, value)
try:  # pragma: no cover
    from functools import lru_cache
except ImportError:  # pragma: no cover
    # Python 2 fallback: ``functools.lru_cache`` is unavailable there.
    # Provide a call-compatible no-op decorator (accepts arbitrary keyword
    # arguments, returns the decorated object unchanged, performs no
    # caching).
    class lru_cache:
        def __init__(self, **kwargs):
            pass
        def __call__(self, ob):
            return ob
class TreeLock(object):
    """Reentrant lock shared by all nodes of a tree.

    The lock instance is lazily created and stored as ``_treelock`` on the
    tree root, so every node of the same tree acquires the same lock.
    Usable as a context manager.
    """

    def __init__(self, node):
        """:param node: Any node of the tree; the lock lives on ``node.root``."""
        root = node.root
        self.lock = getattr(root, '_treelock', None)
        if self.lock is None:
            self.lock = root._treelock = RLock()

    def acquire(self):
        self.lock.acquire()

    __enter__ = acquire

    def release(self):
        self.lock.release()

    def __exit__(self, type, value, traceback):
        self.release()


def locktree(fn):
    """Decorator for locking of a whole method.

    :param fn: The method to decorate. Its ``self`` must provide ``root``.
    :return: Wrapping function carrying ``fn``'s metadata.
    """
    from functools import wraps

    # ``wraps`` preserves __name__/__doc__ of the decorated method, which
    # the previous implementation lost.
    @wraps(fn)
    def _locktree_decorator(self, *args, **kwargs):
        # TreeLock is a context manager; lock is released even on error
        with TreeLock(self):
            return fn(self, *args, **kwargs)
    return _locktree_decorator
import ODict # noqa 12 | from .fields import Set # noqa 13 | from .fields import Str # noqa 14 | from .fields import Tuple # noqa 15 | from .fields import UUID # noqa 16 | from .scope import scope_context # noqa 17 | from .scope import ScopeContext # noqa 18 | from .serializer import Base64Serializer # noqa 19 | from .serializer import DateTimeSerializer # noqa 20 | from .serializer import FieldSerializer # noqa 21 | from .serializer import IterableSerializer # noqa 22 | from .serializer import JSONSerializer # noqa 23 | from .serializer import MappingSerializer # noqa 24 | from .serializer import NodeSerializer # noqa 25 | from .serializer import PickleSerializer # noqa 26 | from .serializer import TypeSerializer # noqa 27 | from .serializer import base64_serializer # noqa 28 | from .serializer import datetime_serializer # noqa 29 | from .serializer import dict_serializer # noqa 30 | from .serializer import float_serializer # noqa 31 | from .serializer import int_serializer # noqa 32 | from .serializer import json_serializer # noqa 33 | from .serializer import list_serializer # noqa 34 | from .serializer import odict_serializer # noqa 35 | from .serializer import pickle_serializer # noqa 36 | from .serializer import set_serializer # noqa 37 | from .serializer import tuple_serializer # noqa 38 | from .serializer import uuid_serializer # noqa 39 | -------------------------------------------------------------------------------- /src/node/schema/scope.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | 3 | 4 | @contextmanager 5 | def scope_context(context, name, parent): 6 | """Context manager for setting scope on context. 7 | 8 | Useful if ``Field`` or ``FieldSerializer`` implementations want to gather 9 | information from related model. 10 | 11 | :param context: ``ScopeContext`` instance. 12 | :param name: The field name in this scope. 
13 | :param parent: The field containing model for this scope. 14 | """ 15 | context.name = name 16 | context.parent = parent 17 | try: 18 | yield context 19 | finally: 20 | context.name = context.parent = None 21 | 22 | 23 | class ScopeContext(object): 24 | """A scoped context. 25 | """ 26 | 27 | name = None 28 | """Name of the field while scoped.""" 29 | 30 | parent = None 31 | """The field containing model while scoped.""" 32 | -------------------------------------------------------------------------------- /src/node/schema/serializer.py: -------------------------------------------------------------------------------- 1 | from node.schema.scope import ScopeContext 2 | from odict import odict 3 | try: # pragma: no cover 4 | from urllib import quote 5 | from urllib import unquote 6 | except ImportError: # pragma: no cover 7 | from urllib.parse import quote 8 | from urllib.parse import unquote 9 | import base64 10 | import datetime 11 | import json 12 | import pickle 13 | import uuid 14 | 15 | 16 | class FieldSerializer(ScopeContext): 17 | """Field serializer. 18 | """ 19 | 20 | def dump(self, value): 21 | """Serialize given value. 22 | 23 | :param value: The value to serialize. 24 | :return: The serialized value. 25 | """ 26 | raise NotImplementedError( 27 | 'Abstract ``FieldSerializer`` does not implement ``dump``' 28 | ) 29 | 30 | def load(self, value): 31 | """Deserialize given value. 32 | 33 | :param value: The value to deserialize. 34 | :return: The deserialized value. 35 | """ 36 | raise NotImplementedError( 37 | 'Abstract ``FieldSerializer`` does not implement ``load``' 38 | ) 39 | 40 | 41 | class TypeSerializer(FieldSerializer): 42 | """Serializer for arbitrary types. 43 | 44 | Converts value to string on serialization. 45 | Creates instance of type from string on deserialization. 46 | """ 47 | 48 | def __init__(self, type_): 49 | """Create TypeSerializer instance. 50 | 51 | :param type_: Type to create at deserialization. 
52 | """ 53 | self.type_ = type_ 54 | 55 | def dump(self, value): 56 | """Convert value to string. 57 | 58 | :param value: The value to convert. Must implement ``__str__`` 59 | :return: Converted value as string. 60 | """ 61 | return str(value) 62 | 63 | def load(self, value): 64 | """Create instance of ``type_`` from string. 65 | 66 | :param value: The string to convert. Gets passed to ``type_`` as only 67 | argument. ``type_`` must support creation from string. 68 | :return: Instance of ``type_``. 69 | """ 70 | return self.type_(value) 71 | 72 | 73 | int_serializer = TypeSerializer(int) 74 | float_serializer = TypeSerializer(float) 75 | uuid_serializer = TypeSerializer(uuid.UUID) 76 | 77 | 78 | class IterableSerializer(FieldSerializer): 79 | """Serializer for iterables. 80 | 81 | Joins iterable into comma separated strings on serialization. 82 | Splits comma separated string into iterable on deserialization. 83 | """ 84 | 85 | def __init__(self, type_): 86 | """Create IterableSerializer instance. 87 | 88 | :param type_: Type to create at deserialization. 89 | """ 90 | self.type_ = type_ 91 | 92 | def dump(self, value): 93 | """Join iterable value to string. 94 | 95 | :param value: The iterable to join. Must contain strings as values. 96 | :return: Items of iterable joined by ',' as string. 97 | """ 98 | items = sorted([quote(item) for item in value]) 99 | return u','.join(items) 100 | 101 | def load(self, value): 102 | """Split string into iterable. 103 | 104 | :param value: The string to split. 105 | :return: Instance of ``type_`` containing strings split by ',' from 106 | value. 107 | """ 108 | return self.type_([unquote(item) for item in value.split(u',')]) 109 | 110 | 111 | list_serializer = IterableSerializer(list) 112 | tuple_serializer = IterableSerializer(tuple) 113 | set_serializer = IterableSerializer(set) 114 | 115 | 116 | class MappingSerializer(FieldSerializer): 117 | """Serializer for mappings. 
118 | 119 | Joins mapping key/value pairs into string on serialization. 120 | Splits string into mapping on deserialization. 121 | """ 122 | 123 | def __init__(self, type_): 124 | """Create MappingSerializer instance. 125 | 126 | :param type_: Type to create at deserialization. 127 | """ 128 | self.type_ = type_ 129 | 130 | def dump(self, value): 131 | """Join dict key/value pairs into string. 132 | 133 | :param value: The dict to join. Keys and values must be strings. 134 | :return: Items of dict joined by ';' as string. Key/value pairs are 135 | joined by ','. 136 | """ 137 | return u';'.join([ 138 | u'{key},{value}'.format(key=quote(key), value=quote(val)) 139 | for key, val in value.items() 140 | ]) 141 | 142 | def load(self, value): 143 | """Split string into dict. 144 | 145 | :param value: The string to split. 146 | :return: Dict from value. Items of dict are split by ';'. Key/value 147 | pairs are split by ','. 148 | """ 149 | ret = self.type_() 150 | for item in value.split(u';'): 151 | key, val = item.split(u',') 152 | ret[unquote(key)] = unquote(val) 153 | return ret 154 | 155 | 156 | dict_serializer = MappingSerializer(dict) 157 | odict_serializer = MappingSerializer(odict) 158 | 159 | 160 | class Base64Serializer(FieldSerializer): 161 | """Serializer for encoding/decoding values with base64 coding.""" 162 | 163 | coding = 'utf-8' 164 | 165 | def dump(self, value): 166 | """Encode value with base64. 167 | 168 | :param value: The value to encode. 169 | :return: Base64 encoded value. 170 | """ 171 | return base64.b64encode(value.encode(self.coding)).decode() 172 | 173 | def load(self, value): 174 | """Encode base64 encoded value. 175 | 176 | :param value: The base64 encoded string. 177 | :return: Decoded value. 
178 | """ 179 | return base64.b64decode(value).decode(self.coding) 180 | 181 | 182 | base64_serializer = Base64Serializer() 183 | 184 | 185 | class JSONSerializer(FieldSerializer): 186 | """Serializer dumpin/loading values as JSON.""" 187 | 188 | def dump(self, value): 189 | """Dump value as JSON string. 190 | 191 | :param value: The value to serialize. 192 | :return: JSON string. 193 | """ 194 | return json.dumps(value) 195 | 196 | def load(self, value): 197 | """Load value from JSON string. 198 | 199 | :param value: The JSON string to deserialize. 200 | :return: Value loaded from JSON. 201 | """ 202 | return json.loads(value) 203 | 204 | 205 | json_serializer = JSONSerializer() 206 | 207 | 208 | class PickleSerializer(FieldSerializer): 209 | """Serializer dumpin/loading values as pickels.""" 210 | 211 | def dump(self, value): 212 | """Dump value as pickle. 213 | 214 | :param value: The object to serialize. 215 | :return: Pickled object. 216 | """ 217 | return pickle.dumps(value) 218 | 219 | def load(self, value): 220 | """Load value from pickle. 221 | 222 | :param value: The object pickle to deserialize. 223 | :return: Object loaded from pickle. 224 | """ 225 | return pickle.loads(value) 226 | 227 | 228 | pickle_serializer = PickleSerializer() 229 | 230 | 231 | class DateTimeSerializer(FieldSerializer): 232 | """Serializer for datetime instances. 233 | 234 | Converts value to string on serialization. 235 | Creates datetime instance of string on deserialization. 236 | """ 237 | 238 | format = '%Y-%m-%dT%H:%M:%S.%f' 239 | 240 | def dump(self, value): 241 | """Dump datetime value as string. 242 | 243 | :param value: The datetime object to serialize. 244 | :return: Datetime as string. 245 | """ 246 | return datetime.datetime.strftime(value, self.format) 247 | 248 | def load(self, value): 249 | """Load datetime value from string. 250 | 251 | :param value: The datetime string to deserialize. 252 | :return: Datetime object. 
253 | """ 254 | return datetime.datetime.strptime(value, self.format) 255 | 256 | 257 | datetime_serializer = DateTimeSerializer() 258 | 259 | 260 | class NodeSerializer(FieldSerializer): 261 | """Serializer for handling node instances.""" 262 | 263 | def __init__(self, type_): 264 | """Create NodeSerializer instance. 265 | 266 | :param type_: The node type. 267 | """ 268 | self.type_ = type_ 269 | 270 | def dump(self, value): 271 | """Dump value as is. 272 | 273 | :param value: The node instance to serialize. 274 | :return: The node instance. 275 | """ 276 | return value 277 | 278 | def load(self, value): 279 | """Load value from parent. 280 | 281 | :param value: Value to deserialize node instance from. 282 | :return: The node instance. 283 | """ 284 | if isinstance(value, self.type_): 285 | return value 286 | name = self.name 287 | parent = self.parent 288 | value = parent[name] = self.type_(name=name, parent=parent) 289 | return value 290 | -------------------------------------------------------------------------------- /src/node/testing/__init__.py: -------------------------------------------------------------------------------- 1 | from node.testing.fullmapping import FullMappingTester # noqa 2 | -------------------------------------------------------------------------------- /src/node/testing/base.py: -------------------------------------------------------------------------------- 1 | from odict import odict 2 | 3 | 4 | def create_tree(class_): 5 | root = class_() 6 | for i in range(3): 7 | root['child_{}'.format(i)] = class_() 8 | for j in range(2): 9 | root['child_{}'.format(i)]['subchild_{}'.format(j)] = class_() 10 | return root 11 | 12 | 13 | class ResultWriter(object): 14 | 15 | def __init__(self, results, name=None): 16 | self.name = name 17 | self.results = results 18 | 19 | def success(self): 20 | self.results[self.name] = 'OK' 21 | 22 | def failed(self, exc): 23 | self.results[self.name] = 'failed: {}'.format(repr(exc)) 24 | 25 | 26 | class 
ContractError(Exception): 27 | pass 28 | 29 | 30 | class BaseTester(object): 31 | # list of interface contract attributes to test. 32 | # test functions always are named 'test_[contractname]'. 33 | # execution is in order, so you might depend tests to prior happened 34 | # context manipulation. 35 | iface_contract = [] 36 | 37 | direct_error = False 38 | 39 | def __init__(self, class_, context=None): 40 | """ 41 | :param class_: class object for creating children in test. 42 | :param context: an optional root context to test against, If None, an 43 | instance of class_ is created as root. 44 | """ 45 | self._results = odict() 46 | self.class_ = class_ 47 | self.context = context 48 | if self.context is None: 49 | self.context = class_() 50 | self._results = odict() 51 | 52 | @property 53 | def results(self): 54 | return self._results 55 | 56 | @property 57 | def combined(self): 58 | res = list() 59 | for key, val in sorted(self.writer().results.iteritems()): 60 | res.append('``{}``: {}'.format(key, val)) 61 | return '\n'.join(res) 62 | 63 | @property 64 | def wherefrom(self): 65 | res = list() 66 | for name in sorted(self.iface_contract): 67 | line = name + ': ' 68 | if name in self.class_.__dict__: 69 | line += self.class_.__name__ 70 | else: 71 | for base in self.class_.__bases__: 72 | if name in base.__dict__: 73 | line += base.__name__ 74 | continue 75 | line += 'function not found on object' 76 | res.append(line) 77 | return '\n'.join(res) 78 | 79 | def run(self): 80 | for name in self.iface_contract: 81 | func = getattr(self, 'test_%s' % name, None) 82 | if func is None: 83 | msg = '``{}`` does not provide ``test_{}``'.format( 84 | self.__class__.__name__, 85 | name 86 | ) 87 | raise ContractError(msg) 88 | writer = self.writer(name) 89 | if self.direct_error: 90 | func() 91 | writer.success() 92 | continue 93 | try: 94 | func() 95 | writer.success() 96 | except Exception as e: 97 | writer.failed(e) 98 | 99 | def writer(self, key=None): 100 | return 
ResultWriter(self._results, name=key) 101 | -------------------------------------------------------------------------------- /src/node/testing/env.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import AsAttrAccess 2 | from node.behaviors import DefaultInit 3 | from node.behaviors import MappingAdopt 4 | from node.behaviors import MappingConstraints 5 | from node.behaviors import MappingNode 6 | from node.behaviors import OdictStorage 7 | from node.interfaces import INode 8 | from plumber import plumbing 9 | from zope.interface import implementer 10 | 11 | 12 | @implementer(INode) 13 | class MockupNode(object): 14 | __name__ = None 15 | __parent__ = None 16 | 17 | 18 | class NoNode(object): 19 | pass 20 | 21 | 22 | @plumbing( 23 | MappingConstraints, 24 | DefaultInit, 25 | MappingAdopt, 26 | AsAttrAccess, 27 | MappingNode, 28 | OdictStorage) 29 | class MyNode(object): 30 | pass 31 | -------------------------------------------------------------------------------- /src/node/testing/profiling.py: -------------------------------------------------------------------------------- 1 | from node.base import Node 2 | import cProfile 3 | 4 | 5 | root = Node() 6 | 7 | 8 | def create(): 9 | global root 10 | for i in range(1, 10000): 11 | root[str(i)] = Node() 12 | 13 | 14 | def delete(): 15 | global root 16 | for i in range(1, 10000): 17 | del root[str(i)] 18 | 19 | 20 | cProfile.run('create()') 21 | cProfile.run('delete()') 22 | -------------------------------------------------------------------------------- /src/node/tests/__init__.py: -------------------------------------------------------------------------------- 1 | import doctest 2 | import unittest 3 | 4 | 5 | class patch(object): 6 | 7 | def __init__(self, module, name, ob): 8 | self.module = module 9 | self.name = name 10 | self.ob = ob 11 | 12 | def __call__(self, ob): 13 | ob.__test_patch__ = (self.module, self.name, self.ob) 14 | 15 | def _wrapped(*args, 
**kw): 16 | module, name, obj = ob.__test_patch__ 17 | orgin = (module, name, getattr(module, name)) 18 | setattr(module, name, obj) 19 | try: 20 | ob(*args, **kw) 21 | except Exception as e: 22 | raise e 23 | finally: 24 | module, name, obj = orgin 25 | setattr(module, name, obj) 26 | return _wrapped 27 | 28 | 29 | class Example(object): 30 | 31 | def __init__(self, want): 32 | self.want = want + '\n' 33 | 34 | 35 | class Failure(Exception): 36 | pass 37 | 38 | 39 | class NodeTestCase(unittest.TestCase): 40 | 41 | def __init__(self, *args, **kw): 42 | unittest.TestCase.__init__(self, *args, **kw) 43 | self._checker = doctest.OutputChecker() 44 | self._optionflags = ( 45 | doctest.NORMALIZE_WHITESPACE 46 | | doctest.ELLIPSIS 47 | | doctest.REPORT_ONLY_FIRST_FAILURE 48 | ) 49 | 50 | def expectError(self, exc, func, *args, **kw): 51 | try: 52 | func(*args, **kw) 53 | except exc as e: 54 | return e 55 | else: 56 | msg = 'Expected \'{}\' when calling \'{}\''.format(exc, func) 57 | raise Exception(msg) 58 | 59 | # B/C 60 | expect_error = expectError 61 | 62 | def checkOutput(self, want, got, optionflags=None): 63 | if optionflags is None: 64 | optionflags = self._optionflags 65 | success = self._checker.check_output(want, got, optionflags) 66 | if not success: 67 | raise Failure(self._checker.output_difference( 68 | Example(want), 69 | got, optionflags 70 | )) 71 | 72 | # B/C 73 | check_output = checkOutput 74 | -------------------------------------------------------------------------------- /src/node/tests/test_adopt.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DictStorage 2 | from node.behaviors import FullMapping 3 | from node.behaviors import ListStorage 4 | from node.behaviors import MappingAdopt 5 | from node.behaviors import MutableSequence 6 | from node.behaviors import SequenceAdopt 7 | from node.interfaces import IMappingAdopt 8 | from node.interfaces import ISequenceAdopt 9 | from 
node.testing.env import MockupNode 10 | from node.testing.env import NoNode 11 | from node.tests import NodeTestCase 12 | from plumber import plumbing 13 | 14 | 15 | class TestAdopt(NodeTestCase): 16 | 17 | def test_MappingAdopt(self): 18 | @plumbing(MappingAdopt, FullMapping, DictStorage) 19 | class AdoptingDict(object): 20 | pass 21 | 22 | ad = AdoptingDict() 23 | self.assertTrue(IMappingAdopt.providedBy(ad)) 24 | 25 | # The mockup node is adopted 26 | node = MockupNode() 27 | ad['foo'] = node 28 | self.assertTrue(ad['foo'] is node) 29 | self.assertEqual(node.__name__, 'foo') 30 | self.assertTrue(node.__parent__ is ad) 31 | 32 | # The non-node object is not adopted 33 | nonode = NoNode() 34 | ad['bar'] = nonode 35 | self.assertTrue(ad['bar'] is nonode) 36 | self.assertFalse(hasattr(nonode, '__name__')) 37 | self.assertFalse(hasattr(nonode, '__parent__')) 38 | 39 | # If something goes wrong, the adoption does not happen. 40 | # All exceptions are caught. 41 | class FakeDict(object): 42 | def __setitem__(self, key, value): 43 | raise KeyError(key) 44 | 45 | def setdefault(self, key, default=None): 46 | pass # pragma: no cover 47 | 48 | @plumbing(MappingAdopt) 49 | class FailingAD(FakeDict): 50 | pass 51 | 52 | fail = FailingAD() 53 | node = MockupNode() 54 | 55 | with self.assertRaises(KeyError): 56 | fail['foo'] = node 57 | 58 | self.assertEqual(node.__name__, None) 59 | self.assertEqual(node.__parent__, None) 60 | 61 | def test_SequenceAdopt(self): 62 | @plumbing(SequenceAdopt, MutableSequence, ListStorage) 63 | class AdoptingList(object): 64 | pass 65 | 66 | al = AdoptingList() 67 | self.assertTrue(ISequenceAdopt.providedBy(al)) 68 | 69 | # The mockup node is adopted 70 | node = MockupNode() 71 | al.insert(0, node) 72 | self.assertTrue(al[0] is node) 73 | self.assertEqual(node.__name__, '0') 74 | self.assertTrue(node.__parent__ is al) 75 | 76 | al[0] = node = MockupNode() 77 | self.assertTrue(al[0] is node) 78 | self.assertEqual(node.__name__, '0') 79 | 
self.assertTrue(node.__parent__ is al) 80 | 81 | # The non-node object is not adopted 82 | nonode = NoNode() 83 | al[0] = nonode 84 | self.assertTrue(al[0] is nonode) 85 | self.assertFalse(hasattr(nonode, '__name__')) 86 | self.assertFalse(hasattr(nonode, '__parent__')) 87 | 88 | # Slicing is not supported 89 | with self.assertRaises(NotImplementedError): 90 | al[:] = [1, 2, 3] 91 | 92 | # If something goes wrong, the adoption does not happen. 93 | # All exceptions are caught. 94 | class FakeList(object): 95 | def __setitem__(self, index, value): 96 | pass # pragma: no cover 97 | 98 | def insert(self, index, value): 99 | raise Exception() 100 | 101 | @plumbing(SequenceAdopt) 102 | class FailingAL(FakeList): 103 | pass 104 | 105 | fail = FailingAL() 106 | node = MockupNode() 107 | 108 | with self.assertRaises(Exception): 109 | fail.insert(0, node) 110 | 111 | self.assertEqual(node.__name__, None) 112 | self.assertEqual(node.__parent__, None) 113 | 114 | def test_BC_imports(self): 115 | from node.behaviors import Adopt 116 | self.assertTrue(Adopt is MappingAdopt) 117 | 118 | from node.interfaces import IAdopt 119 | self.assertTrue(IAdopt is IMappingAdopt) 120 | -------------------------------------------------------------------------------- /src/node/tests/test_alias.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import Alias 2 | from node.behaviors.alias import AliaserChain 3 | from node.behaviors.alias import DictAliaser 4 | from node.behaviors.alias import PrefixAliaser 5 | from node.behaviors.alias import PrefixSuffixAliaser 6 | from node.behaviors.alias import SuffixAliaser 7 | from node.tests import NodeTestCase 8 | from plumber import plumbing 9 | 10 | 11 | class TestAlias(NodeTestCase): 12 | 13 | def test_DictAliaser(self): 14 | # A dict aliaser takes a dictionary as base for aliasing 15 | da = DictAliaser([('alias1', 'key1'), ('alias2', 'key2')]) 16 | self.assertEqual(da.alias('key1'), 'alias1') 
17 | self.assertEqual(da.unalias('alias2'), 'key2') 18 | 19 | # By default, aliasing is strict, which means that only key/value pairs 20 | # set in aliaser are valid 21 | with self.assertRaises(KeyError) as arc: 22 | da.alias('foo') 23 | self.assertEqual(str(arc.exception), '\'foo\'') 24 | with self.assertRaises(KeyError) as arc: 25 | da.unalias('foo') 26 | self.assertEqual(str(arc.exception), '\'foo\'') 27 | 28 | # By setting strict to False, inexistent keys are returned as fallback 29 | da = DictAliaser( 30 | [('alias1', 'key1'), ('alias2', 'key2')], 31 | strict=False 32 | ) 33 | self.assertEqual(da.alias('foo'), 'foo') 34 | self.assertEqual(da.unalias('foo'), 'foo') 35 | 36 | def test_PrefixAliaser(self): 37 | # An aliaser that simply prefixes all keys 38 | pa = PrefixAliaser('prefix-') 39 | self.assertEqual(pa.alias('foo'), 'prefix-foo') 40 | self.assertEqual(pa.unalias('prefix-foo'), 'foo') 41 | 42 | with self.assertRaises(KeyError) as arc: 43 | pa.unalias('foo') 44 | expected = '"key \'foo\' does not match prefix \'prefix-\'"' 45 | self.assertTrue(str(arc.exception).find(expected) > -1) 46 | 47 | def test_SuffixAliaser(self): 48 | # An aliaser that simply suffixes all keys 49 | sa = SuffixAliaser('-suffix') 50 | self.assertEqual(sa.alias('foo'), 'foo-suffix') 51 | self.assertEqual(sa.unalias('foo-suffix'), 'foo') 52 | 53 | with self.assertRaises(KeyError) as arc: 54 | sa.unalias('foo') 55 | expected = '"key \'foo\' does not match suffix \'-suffix\'"' 56 | self.assertTrue(str(arc.exception).find(expected) > -1) 57 | 58 | def test_AliaserChain(self): 59 | # A chain of aliasers 60 | aliaser = AliaserChain() 61 | 62 | pa = PrefixAliaser('prefix-') 63 | pa2 = PrefixAliaser('pre2-') 64 | 65 | aliaser.chain = [pa, pa2] 66 | self.assertEqual(aliaser.alias('foo'), 'pre2-prefix-foo') 67 | self.assertEqual(aliaser.unalias(aliaser.alias('foo')), 'foo') 68 | 69 | aliaser.chain = [pa2, pa] 70 | self.assertEqual(aliaser.unalias(aliaser.alias('foo')), 'foo') 71 | 72 | def 
test_PrefixSuffixAliaser(self): 73 | # Combined prefix and suffix aliaser 74 | psa = PrefixSuffixAliaser('prefix-', '-suffix') 75 | self.assertEqual(psa.alias('foo'), 'prefix-foo-suffix') 76 | self.assertEqual(psa.unalias(psa.alias('foo')), 'foo') 77 | 78 | def test_Alias_no_aliaser(self): 79 | # A dictionary that uses the alias plumbing but does not assign an 80 | # aliaser. Therefore, no aliasing is happening 81 | @plumbing(Alias) 82 | class AliasDict(dict): 83 | pass 84 | 85 | ad = AliasDict() 86 | ad['foo'] = 1 87 | self.assertEqual(ad['foo'], 1) 88 | self.assertEqual([x for x in ad], ['foo']) 89 | 90 | del ad['foo'] 91 | self.assertEqual([x for x in ad], []) 92 | 93 | def test_Alias_with_PrefixAliaser(self): 94 | # A dictionary that uses the alias plumbing with a prefix aliaser 95 | @plumbing(Alias) 96 | class AliasDict(dict): 97 | aliaser = PrefixAliaser(prefix="pre-") 98 | 99 | ad = AliasDict() 100 | ad['pre-foo'] = 1 101 | self.assertEqual(ad['pre-foo'], 1) 102 | self.assertEqual([x for x in ad], ['pre-foo']) 103 | 104 | del ad['pre-foo'] 105 | self.assertEqual([x for x in ad], []) 106 | 107 | def test_KeyError_with_aliased_key(self): 108 | # KeyErrors in the backend are caught and re-raised with the value of 109 | # the aliased key 110 | class FakeDict(object): 111 | def __delitem__(self, key): 112 | raise KeyError(key) 113 | 114 | def __getitem__(self, key): 115 | raise KeyError(key) 116 | 117 | def __iter__(self): 118 | yield 'foo' 119 | 120 | def __setitem__(self, key, val): 121 | raise KeyError(key) 122 | 123 | @plumbing(Alias) 124 | class FailDict(FakeDict): 125 | aliaser = PrefixAliaser(prefix="pre-") 126 | 127 | fail = FailDict() 128 | 129 | with self.assertRaises(KeyError) as arc: 130 | fail['pre-foo'] = 1 131 | self.assertEqual(str(arc.exception), '\'pre-foo\'') 132 | 133 | with self.assertRaises(KeyError) as arc: 134 | fail['pre-foo'] 135 | self.assertEqual(str(arc.exception), '\'pre-foo\'') 136 | 137 | with self.assertRaises(KeyError) as arc: 
138 | del fail['pre-foo'] 139 | self.assertEqual(str(arc.exception), '\'pre-foo\'') 140 | 141 | # A prefix aliaser cannot raise a KeyError, nevertheless, if it does, 142 | # that error must not be caught by the code that handle alias KeyErrors 143 | # for whitelisting. 144 | 145 | def failalias(key): 146 | raise KeyError() 147 | 148 | fail.aliaser.alias = failalias 149 | 150 | def fail_alias(): 151 | [x for x in fail] 152 | self.assertRaises(KeyError, fail_alias) 153 | 154 | def test_Alias_with_DictAliaser(self): 155 | # A dictionary that uses the alias plumbing with a dict aliaser 156 | @plumbing(Alias) 157 | class AliasDict(dict): 158 | aliaser = DictAliaser(data=(('foo', 'f00'), ('bar', 'b4r'))) 159 | 160 | ad = AliasDict() 161 | ad['foo'] = 1 162 | self.assertEqual([x for x in ad], ['foo']) 163 | 164 | # Let's put a key in the dict, that is not mapped by the dictionary 165 | # aliaser. This is not possible through the plumbing ``__setitem__``, 166 | # we need to use ``dict.__setitem__`` 167 | with self.assertRaises(KeyError) as arc: 168 | ad['abc'] = 1 169 | self.assertEqual(str(arc.exception), '\'abc\'') 170 | 171 | dict.__setitem__(ad, 'abc', 1) 172 | self.assertEqual([x for x in ad], ['foo']) 173 | 174 | # To see the keys that are really in the dictionary, we use 175 | # ``dict.__iter__``, not the plumbing ``__iter__`` 176 | self.assertEqual( 177 | [x for x in sorted(dict.__iter__(ad))], 178 | ['abc', 'f00'] 179 | ) 180 | -------------------------------------------------------------------------------- /src/node/tests/test_attributes.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import Attributes 2 | from node.behaviors import DefaultInit 3 | from node.behaviors import DictStorage 4 | from node.behaviors import MappingNode 5 | from node.behaviors import Nodespaces 6 | from node.behaviors.attributes import NodeAttributes 7 | from node.interfaces import INodeAttributes 8 | from node.tests import 
NodeTestCase 9 | from node.utils import AttributeAccess 10 | from plumber import plumbing 11 | 12 | 13 | class TestAttributes(NodeTestCase): 14 | 15 | def test_Attributes(self): 16 | @plumbing(Attributes, DefaultInit, MappingNode, DictStorage) 17 | class AttributedNode(object): 18 | pass 19 | 20 | node = AttributedNode(name='attributed') 21 | self.assertFalse(node.attribute_access_for_attrs) 22 | self.assertEqual(node.attributes_factory, NodeAttributes) 23 | 24 | with self.assertRaises(AttributeError): 25 | node.__attrs__ 26 | attrs = node.attrs 27 | self.assertEqual(node.__attrs__, attrs) 28 | 29 | self.assertIsInstance(node.attrs, NodeAttributes) 30 | self.assertTrue(INodeAttributes.providedBy(node.attrs)) 31 | 32 | expected = ': None\n' 45 | ' : c1\n' 46 | ' : c2\n' 47 | ' : d1\n' 48 | ' : d2\n' 49 | )) 50 | 51 | # Active invalidation of child by key 52 | root.invalidate(key='c1') 53 | self.assertEqual(root.treerepr(), ( 54 | ': None\n' 55 | ' : c2\n' 56 | ' : d1\n' 57 | ' : d2\n' 58 | )) 59 | 60 | with self.assertRaises(KeyError) as arc: 61 | root.invalidate(key='c1') 62 | self.assertEqual(str(arc.exception), '\'c1\'') 63 | 64 | # Active invalidation of all children 65 | root['c2'].invalidate() 66 | self.assertEqual(root.treerepr(), ( 67 | ': None\n' 68 | ' : c2\n' 69 | )) 70 | 71 | def test_VolatileStorageInvalidate(self): 72 | # When a node internally uses a volatile storage like ``DictStorage`` 73 | # or ``OdictStorage``, some can use ``VolatileStorageInvalidate`` for 74 | # invalidation 75 | @plumbing( 76 | MappingAdopt, 77 | VolatileStorageInvalidate, 78 | DefaultInit, 79 | MappingNode, 80 | OdictStorage) 81 | class Node(object): 82 | pass 83 | 84 | # Test tree: 85 | root = Node() 86 | root['c1'] = Node() 87 | root['c2'] = Node() 88 | root['c2']['d1'] = Node() 89 | root['c2']['d2'] = Node() 90 | 91 | self.assertTrue(IInvalidate.providedBy(root)) 92 | self.assertFalse(ICache.providedBy(root)) 93 | 94 | self.assertEqual(root.treerepr(), ( 95 | ': None\n' 96 
| ' : c1\n' 97 | ' : c2\n' 98 | ' : d1\n' 99 | ' : d2\n' 100 | )) 101 | 102 | # Active invalidation of child by key 103 | root.invalidate(key='c1') 104 | self.assertEqual(root.treerepr(), ( 105 | ': None\n' 106 | ' : c2\n' 107 | ' : d1\n' 108 | ' : d2\n' 109 | )) 110 | 111 | with self.assertRaises(KeyError) as arc: 112 | root.invalidate(key='c1') 113 | self.assertEqual(str(arc.exception), '\'c1\'') 114 | 115 | # Active invalidation of all children 116 | root['c2'].invalidate() 117 | self.assertEqual(root.treerepr(), ( 118 | ': None\n' 119 | ' : c2\n' 120 | )) 121 | 122 | def test_with_ChildFactory(self): 123 | # Check for ChildFactory Node 124 | @plumbing( 125 | MappingAdopt, 126 | VolatileStorageInvalidate, 127 | DefaultInit, 128 | MappingNode, 129 | ChildFactory, 130 | OdictStorage) 131 | class Node(object): 132 | factories = { 133 | 'foo': BaseNode, 134 | 'bar': BaseNode, 135 | } 136 | 137 | node = Node() 138 | self.checkOutput("""\ 139 | [('bar', ), 140 | ('foo', )] 141 | """, str(sorted(node.items()))) 142 | 143 | node.invalidate('foo') 144 | self.assertEqual(sorted(node.keys()), ['bar', 'foo']) 145 | 146 | self.checkOutput("""\ 147 | [('bar', )] 148 | """, str(node.storage.items())) 149 | 150 | node.invalidate('foo') 151 | self.checkOutput("""\ 152 | [('bar', )] 153 | """, str(node.storage.items())) 154 | 155 | node.invalidate() 156 | self.assertEqual(node.storage.items(), []) 157 | 158 | with self.assertRaises(KeyError) as arc: 159 | node.invalidate('baz') 160 | self.assertEqual(str(arc.exception), '\'baz\'') 161 | 162 | def test_Cache(self): 163 | # Build a node with active invalidation and cache functionality 164 | @plumbing( 165 | MappingAdopt, 166 | Cache, 167 | Invalidate, 168 | DefaultInit, 169 | MappingNode, 170 | OdictStorage) 171 | class Node(object): 172 | pass 173 | 174 | root = Node() 175 | root['c1'] = Node() 176 | root['c2'] = Node() 177 | root['c2']['d1'] = Node() 178 | root['c2']['d2'] = Node() 179 | 180 | 
self.assertTrue(IInvalidate.providedBy(root)) 181 | self.assertTrue(ICache.providedBy(root)) 182 | 183 | # We just accessed 'c2' above, only cached value on root at the moment 184 | self.assertEqual(list(root.cache.keys()), ['c2']) 185 | expected = '), 194 | ('c2', )] 195 | """, str(sorted(root.cache.items()))) 196 | 197 | # Invalidate plumbing removes item from cache 198 | root.invalidate(key='c1') 199 | self.assertEqual(list(root.cache.keys()), ['c2']) 200 | 201 | root.invalidate() 202 | self.assertEqual(root.cache, {}) 203 | 204 | self.assertEqual(root.treerepr(), ( 205 | ': None\n' 206 | )) 207 | 208 | # Test invalidation plumbing hook with missing cache values 209 | root['x1'] = Node() 210 | root['x2'] = Node() 211 | self.assertEqual(root.treerepr(), ( 212 | ': None\n' 213 | ' : x1\n' 214 | ' : x2\n' 215 | )) 216 | 217 | self.checkOutput("""\ 218 | [('x1', ), 219 | ('x2', )] 220 | """, str(sorted(root.cache.items()))) 221 | 222 | del root.cache['x1'] 223 | del root.cache['x2'] 224 | 225 | root.invalidate(key='x1') 226 | self.assertEqual(root.treerepr(), ( 227 | ': None\n' 228 | ' : x2\n' 229 | )) 230 | 231 | del root.cache['x2'] 232 | root.invalidate() 233 | self.assertEqual(root.treerepr(), ( 234 | ': None\n' 235 | )) 236 | -------------------------------------------------------------------------------- /src/node/tests/test_common.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DefaultInit 2 | from node.behaviors import MappingAdopt 3 | from node.behaviors import MappingNode 4 | from node.behaviors import OdictStorage 5 | from node.behaviors import UnicodeAware 6 | from node.behaviors import UUIDAware 7 | from node.tests import NodeTestCase 8 | from plumber import plumbing 9 | import uuid 10 | 11 | 12 | class TestCommon(NodeTestCase): 13 | 14 | def test_UnicodeAware(self): 15 | @plumbing(MappingNode, UnicodeAware, OdictStorage) 16 | class UnicodeNode(object): 17 | pass 18 | 19 | node = 
UnicodeNode() 20 | node['foo'] = UnicodeNode() 21 | self.assertEqual(list(node.keys()), [u'foo']) 22 | 23 | node['bar'] = 'bar' 24 | self.assertEqual( 25 | node.items(), 26 | [(u'foo', node['foo']), (u'bar', u'bar')] 27 | ) 28 | 29 | self.assertTrue(isinstance(node['foo'], UnicodeNode)) 30 | 31 | del node['bar'] 32 | self.assertEqual(list(node.keys()), [u'foo']) 33 | 34 | def test_UUIDAware(self): 35 | # Create a uid aware node. ``copy`` is not supported on UUIDAware node 36 | # trees, ``deepcopy`` must be used 37 | @plumbing( 38 | MappingAdopt, 39 | DefaultInit, 40 | MappingNode, 41 | OdictStorage, 42 | UUIDAware) 43 | class UUIDNode(object): 44 | pass 45 | 46 | # UUID is set at init time 47 | root = UUIDNode(name='root') 48 | self.assertTrue(isinstance(root.uuid, uuid.UUID)) 49 | 50 | # Shallow ``copy`` is prohibited for UUID aware nodes 51 | with self.assertRaises(RuntimeError) as arc: 52 | root.copy() 53 | exp = 'Shallow copy useless on UUID aware node trees, use deepcopy.' 54 | self.assertEqual(str(arc.exception), exp) 55 | 56 | # On ``deepcopy``, a new uid gets set: 57 | root_cp = root.deepcopy() 58 | self.assertFalse(root is root_cp) 59 | self.assertFalse(root.uuid == root_cp.uuid) 60 | 61 | # Create children, copy tree and check if all uuids have changed 62 | c1 = root['c1'] = UUIDNode() 63 | c1['s1'] = UUIDNode() 64 | self.assertEqual(root.treerepr(), ( 65 | ": root\n" 66 | " : c1\n" 67 | " : s1\n" 68 | )) 69 | 70 | root_cp = root.deepcopy() 71 | self.assertEqual(root_cp.treerepr(), ( 72 | ": root\n" 73 | " : c1\n" 74 | " : s1\n" 75 | )) 76 | 77 | self.assertFalse(root.uuid == root_cp.uuid) 78 | self.assertFalse(root['c1'].uuid == root_cp['c1'].uuid) 79 | self.assertFalse(root['c1']['s1'].uuid == root_cp['c1']['s1'].uuid) 80 | 81 | # When detaching part of a tree, uid's are not changed 82 | c1_uid = root['c1'].uuid 83 | s1_uid = root['c1']['s1'].uuid 84 | detached = root.detach('c1') 85 | 86 | self.assertEqual(root.treerepr(), ( 87 | ": root\n" 88 | )) 89 
| 90 | self.assertEqual(detached.treerepr(), ( 91 | ": c1\n" 92 | " : s1\n" 93 | )) 94 | 95 | self.assertTrue(c1_uid == detached.uuid) 96 | self.assertTrue(s1_uid == detached['s1'].uuid) 97 | 98 | # uuid not gets overwritten on __init__ if already set 99 | UUIDNode.uuid = uuid.UUID('3252e869-72cf-4426-9e34-abbc3cbe3af0') 100 | uuid_node = UUIDNode() 101 | self.assertEqual(uuid_node.uuid, UUIDNode.uuid) 102 | -------------------------------------------------------------------------------- /src/node/tests/test_constraints.py: -------------------------------------------------------------------------------- 1 | from node.base import BaseNode 2 | from node.behaviors import DefaultInit 3 | from node.behaviors import ListStorage 4 | from node.behaviors import MappingConstraints 5 | from node.behaviors import MappingNode 6 | from node.behaviors import OdictStorage 7 | from node.behaviors import SequenceConstraints 8 | from node.behaviors import SequenceNode 9 | from node.behaviors.constraints import check_constraints 10 | from node.behaviors.constraints import child_constraints 11 | from node.behaviors.constraints import Constraints 12 | from node.interfaces import IMappingConstraints 13 | from node.interfaces import INode 14 | from node.tests import NodeTestCase 15 | from plumber import plumbing 16 | 17 | 18 | class TestConstraints(NodeTestCase): 19 | 20 | def test_child_constraints(self): 21 | class BC1: 22 | allow_non_node_childs = False 23 | 24 | ob = BC1() 25 | self.assertEqual(child_constraints(ob), (INode,)) 26 | 27 | ob.allow_non_node_childs = True 28 | self.assertEqual(child_constraints(ob), tuple()) 29 | 30 | class BC2: 31 | allow_non_node_children = False 32 | 33 | ob = BC2() 34 | self.assertEqual(child_constraints(ob), (INode,)) 35 | 36 | ob.allow_non_node_children = True 37 | self.assertEqual(child_constraints(ob), tuple()) 38 | 39 | class Constrains: 40 | child_constraints = None 41 | 42 | ob = Constrains() 43 | self.assertEqual(child_constraints(ob), 
tuple()) 44 | 45 | ob.child_constraints = (int, float) 46 | self.assertEqual(child_constraints(ob), (int, float)) 47 | 48 | def test_check_constraints(self): 49 | @plumbing(Constraints) 50 | class ConstraintsOb: 51 | pass 52 | 53 | self.assertEqual(ConstraintsOb.child_constraints, (INode,)) 54 | 55 | ob = ConstraintsOb() 56 | self.assertEqual(check_constraints(ob, BaseNode()), None) 57 | 58 | with self.assertRaises(ValueError) as arc: 59 | check_constraints(ob, object()) 60 | self.assertEqual( 61 | str(arc.exception), 62 | 'Given value does not implement node.interfaces.INode' 63 | ) 64 | 65 | ob.child_constraints = (int,) 66 | with self.assertRaises(ValueError) as arc: 67 | check_constraints(ob, '') 68 | self.assertEqual( 69 | str(arc.exception), 70 | 'Given value is no instance of int' 71 | ) 72 | self.assertEqual(check_constraints(ob, 0), None) 73 | 74 | ob.child_constraints = None 75 | self.assertEqual(check_constraints(ob, 0), None) 76 | 77 | def test_MappingConstraints(self): 78 | @plumbing(MappingConstraints, DefaultInit, MappingNode, OdictStorage) 79 | class MappingConstraintsNode(object): 80 | pass 81 | 82 | node = MappingConstraintsNode() 83 | with self.assertRaises(ValueError) as arc: 84 | node['child'] = 1 85 | self.assertEqual( 86 | str(arc.exception), 87 | 'Given value does not implement node.interfaces.INode' 88 | ) 89 | 90 | node.child_constraints = None 91 | node['child'] = 1 92 | self.assertEqual(node['child'], 1) 93 | 94 | def test_SequenceConstraints(self): 95 | @plumbing(SequenceConstraints, DefaultInit, SequenceNode, ListStorage) 96 | class SequenceConstraintsNode(object): 97 | pass 98 | 99 | node = SequenceConstraintsNode() 100 | with self.assertRaises(ValueError) as arc: 101 | node.insert(0, 1) 102 | self.assertEqual( 103 | str(arc.exception), 104 | 'Given value does not implement node.interfaces.INode' 105 | ) 106 | with self.assertRaises(ValueError) as arc: 107 | node['0'] = 1 108 | self.assertEqual( 109 | str(arc.exception), 110 | 'Given 
value does not implement node.interfaces.INode' 111 | ) 112 | 113 | node.child_constraints = None 114 | node.insert(0, 1) 115 | self.assertEqual(node['0'], 1) 116 | node['0'] = 0 117 | self.assertEqual(node['0'], 0) 118 | 119 | def test_BC_imports(self): 120 | from node.behaviors import NodeChildValidate 121 | self.assertTrue(MappingConstraints is NodeChildValidate) 122 | 123 | from node.interfaces import INodeChildValidate 124 | self.assertTrue(IMappingConstraints is INodeChildValidate) 125 | -------------------------------------------------------------------------------- /src/node/tests/test_context.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import BoundContext 2 | from node.interfaces import IBoundContext 3 | from node.tests import NodeTestCase 4 | from plumber import plumbing 5 | from zope.interface import implementer 6 | from zope.interface import Interface 7 | 8 | 9 | class IBoundInterface(Interface): 10 | pass 11 | 12 | 13 | class BoundClass(object): 14 | pass 15 | 16 | 17 | @plumbing(BoundContext) 18 | class ContextAware(object): 19 | 20 | @classmethod 21 | def unbind_context(cls): 22 | cls.__bound_context_interfaces__ = () 23 | cls.__bound_context_classes__ = () 24 | 25 | 26 | class TestContext(NodeTestCase): 27 | 28 | def test_BoundContext_bind_context(self): 29 | self.assertEqual(ContextAware.__bound_context_interfaces__, ()) 30 | self.assertEqual(ContextAware.__bound_context_classes__, ()) 31 | 32 | ContextAware.bind_context(None) 33 | self.assertEqual(ContextAware.__bound_context_interfaces__, ()) 34 | self.assertEqual(ContextAware.__bound_context_classes__, ()) 35 | 36 | ca = ContextAware() 37 | self.assertTrue(IBoundContext.providedBy(ca)) 38 | 39 | ContextAware.bind_context(IBoundInterface) 40 | self.assertEqual( 41 | ContextAware.__bound_context_interfaces__, 42 | (IBoundInterface,) 43 | ) 44 | self.assertEqual(ContextAware.__bound_context_classes__, ()) 45 | 46 | 
ContextAware.unbind_context() 47 | 48 | ContextAware.bind_context(BoundClass) 49 | self.assertEqual(ContextAware.__bound_context_interfaces__, ()) 50 | self.assertEqual( 51 | ContextAware.__bound_context_classes__, 52 | (BoundClass,) 53 | ) 54 | 55 | ContextAware.unbind_context() 56 | ContextAware.bind_context(IBoundInterface, BoundClass) 57 | self.assertEqual( 58 | ContextAware.__bound_context_interfaces__, 59 | (IBoundInterface,) 60 | ) 61 | self.assertEqual( 62 | ContextAware.__bound_context_classes__, 63 | (BoundClass,) 64 | ) 65 | 66 | with self.assertRaises(RuntimeError): 67 | ContextAware.bind_context(object) 68 | 69 | ContextAware.unbind_context() 70 | with self.assertRaises(ValueError): 71 | ContextAware.bind_context(lambda: 1) 72 | 73 | def test_BoundContext_context_matches(self): 74 | @implementer(IBoundInterface) 75 | class BoundInterface(object): 76 | pass 77 | 78 | ContextAware.unbind_context() 79 | inst = ContextAware() 80 | self.assertTrue(inst.context_matches(object())) 81 | 82 | ContextAware.bind_context(BoundClass) 83 | inst = ContextAware() 84 | self.assertFalse(inst.context_matches(object())) 85 | self.assertTrue(inst.context_matches(BoundClass())) 86 | 87 | ContextAware.unbind_context() 88 | ContextAware.bind_context(IBoundInterface) 89 | inst = ContextAware() 90 | self.assertFalse(inst.context_matches(object())) 91 | self.assertTrue(inst.context_matches(BoundInterface())) 92 | 93 | ContextAware.unbind_context() 94 | ContextAware.bind_context(IBoundInterface, BoundClass) 95 | inst = ContextAware() 96 | self.assertFalse(inst.context_matches(object())) 97 | self.assertTrue(inst.context_matches(BoundClass())) 98 | self.assertTrue(inst.context_matches(BoundInterface())) 99 | -------------------------------------------------------------------------------- /src/node/tests/test_factories.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import ChildFactory 2 | from node.behaviors import 
DefaultInit 3 | from node.behaviors import FixedChildren 4 | from node.behaviors import MappingNode 5 | from node.behaviors import Node 6 | from node.behaviors import OdictStorage 7 | from node.behaviors import WildcardFactory 8 | from node.behaviors.factories import _wildcard_pattern_occurrences 9 | from node.behaviors.factories import _wildcard_patterns_by_specificity 10 | from node.tests import NodeTestCase 11 | from odict import odict 12 | from plumber import plumbing 13 | 14 | 15 | @plumbing(DefaultInit, Node) 16 | class Factory(object): 17 | pass 18 | 19 | 20 | @plumbing(Node) 21 | class LegacyFactory(object): 22 | pass 23 | 24 | 25 | class TestFactories(NodeTestCase): 26 | 27 | def test_ChildFactory(self): 28 | @plumbing(MappingNode, ChildFactory, OdictStorage) 29 | class ChildFactoryNode(object): 30 | factories = odict([ 31 | ('factory', Factory), 32 | ('legacy', LegacyFactory) 33 | ]) 34 | 35 | node = ChildFactoryNode() 36 | self.assertEqual(list(node.keys()), ['factory', 'legacy']) 37 | self.assertTrue(isinstance(node['factory'], Factory)) 38 | self.assertTrue(isinstance(node['legacy'], LegacyFactory)) 39 | 40 | def test_FixedChildren(self): 41 | @plumbing(MappingNode, FixedChildren) 42 | class FixedChildrenNode(object): 43 | factories = odict([ 44 | ('factory', Factory), 45 | ('legacy', LegacyFactory) 46 | ]) 47 | 48 | node = FixedChildrenNode() 49 | self.assertEqual(list(node.keys()), ['factory', 'legacy']) 50 | self.assertTrue(isinstance(node['factory'], Factory)) 51 | self.assertTrue(isinstance(node['legacy'], LegacyFactory)) 52 | self.assertTrue(node['factory'] is node['factory']) 53 | 54 | with self.assertRaises(NotImplementedError) as arc: 55 | del node['factory'] 56 | self.assertEqual(str(arc.exception), 'read-only') 57 | 58 | with self.assertRaises(NotImplementedError) as arc: 59 | node['factory'] = Factory() 60 | self.assertEqual(str(arc.exception), 'read-only') 61 | 62 | @plumbing(MappingNode, FixedChildren) 63 | class 
LegacyFixedChildrenNode(object): 64 | fixed_children_factories = odict([ 65 | ('factory', Factory), 66 | ('legacy', LegacyFactory) 67 | ]) 68 | 69 | node = LegacyFixedChildrenNode() 70 | self.assertEqual(list(node.keys()), ['factory', 'legacy']) 71 | 72 | # B/C interface violation 73 | @plumbing(MappingNode, FixedChildren) 74 | class LegacyFixedChildrenNode(object): 75 | fixed_children_factories = ( 76 | ('factory', Factory), 77 | ('legacy', LegacyFactory) 78 | ) 79 | 80 | node = LegacyFixedChildrenNode() 81 | self.assertEqual(list(node.keys()), ['factory', 'legacy']) 82 | 83 | def test__wildcard_pattern_occurrences(self): 84 | self.assertEqual(_wildcard_pattern_occurrences('abc'), (3, 0, 0, 0)) 85 | self.assertEqual(_wildcard_pattern_occurrences('*a*b*'), (5, 3, 0, 0)) 86 | self.assertEqual(_wildcard_pattern_occurrences('?a?b?'), (5, 0, 3, 0)) 87 | self.assertEqual(_wildcard_pattern_occurrences('[]]'), (1, 0, 0, 1)) 88 | self.assertEqual(_wildcard_pattern_occurrences('[][!]'), (1, 0, 0, 1)) 89 | self.assertEqual( 90 | _wildcard_pattern_occurrences('[a-z]a[abc]b[!abc]'), 91 | (5, 0, 0, 3) 92 | ) 93 | self.assertEqual( 94 | _wildcard_pattern_occurrences('*?[a-z]a*[abc]b?[!abc]?*'), 95 | (11, 3, 3, 3) 96 | ) 97 | with self.assertRaises(ValueError): 98 | _wildcard_pattern_occurrences('*?[') 99 | with self.assertRaises(ValueError): 100 | _wildcard_pattern_occurrences('[*?') 101 | 102 | def test__wildcard_patterns_by_specificity(self): 103 | self.assertEqual(_wildcard_patterns_by_specificity( 104 | ('*.*', '*.a', '?.a', 'a.a')), 105 | ('a.a', '?.a', '*.a', '*.*') 106 | ) 107 | self.assertEqual(_wildcard_patterns_by_specificity( 108 | ('*.*', '*.a', '*a', '?.*', '?.a', '?a')), 109 | ('?.a', '?a', '*.a', '?.*', '*.*', '*a') 110 | ) 111 | self.assertEqual(_wildcard_patterns_by_specificity( 112 | ('?', '??', '[a-z]', '?[a-z]', '[abc][abc]')), 113 | ('[abc][abc]', '[a-z]', '?[a-z]', '??', '?') 114 | ) 115 | self.assertEqual(_wildcard_patterns_by_specificity( 116 | ('*', 
'*bc', '?bc', '[xyz]bc', 'abc')), 117 | ('abc', '[xyz]bc', '?bc', '*bc', '*') 118 | ) 119 | self.assertEqual(_wildcard_patterns_by_specificity( 120 | ('*', 'file.txt', '*.txt', '*_[0-9][0-9].txt')), 121 | ('file.txt', '*_[0-9][0-9].txt', '*.txt', '*') 122 | ) 123 | 124 | def test_WildcardFactory(self): 125 | @plumbing(WildcardFactory) 126 | class WildCardFactoryNode(object): 127 | factories = { 128 | '*': 'default_factory', 129 | 'file.txt': 'specific_text_file_factory', 130 | '*.txt': 'default_text_file_factory', 131 | '*_[0-9][0-9].txt': 'pattern_text_file_factory' 132 | } 133 | 134 | wcfn = WildCardFactoryNode() 135 | self.assertTrue(wcfn.pattern_weighting) 136 | self.assertEqual( 137 | wcfn.factory_for_pattern('file.txt'), 138 | 'specific_text_file_factory' 139 | ) 140 | self.assertEqual( 141 | wcfn.factory_for_pattern('file_01.txt'), 142 | 'pattern_text_file_factory' 143 | ) 144 | self.assertEqual( 145 | wcfn.factory_for_pattern('default.txt'), 146 | 'default_text_file_factory' 147 | ) 148 | self.assertEqual( 149 | wcfn.factory_for_pattern('default'), 150 | 'default_factory' 151 | ) 152 | 153 | @plumbing(WildcardFactory) 154 | class UnweightedWildCardFactoryNode(object): 155 | pattern_weighting = False 156 | factories = odict([ 157 | ('*.txt', 'default_text_file_factory',), 158 | ('file.txt', 'specific_text_file_factory') 159 | ]) 160 | 161 | uwcfn = UnweightedWildCardFactoryNode() 162 | # pattern weighting is disabled, patterns are searched in defined 163 | # order, therefor factory for ``file.txt`` never applies. 
164 | self.assertEqual( 165 | uwcfn.factory_for_pattern('file.txt'), 166 | 'default_text_file_factory' 167 | ) 168 | -------------------------------------------------------------------------------- /src/node/tests/test_fallback.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import Attributes 2 | from node.behaviors import DefaultInit 3 | from node.behaviors import Fallback 4 | from node.behaviors import MappingAdopt 5 | from node.behaviors import MappingConstraints 6 | from node.behaviors import MappingNode 7 | from node.behaviors import Nodespaces 8 | from node.behaviors import OdictStorage 9 | from node.tests import NodeTestCase 10 | from plumber import plumbing 11 | 12 | 13 | ############################################################################### 14 | # Mock objects 15 | ############################################################################### 16 | 17 | @plumbing( 18 | Nodespaces, 19 | Fallback, 20 | MappingAdopt, 21 | DefaultInit, 22 | MappingNode, 23 | OdictStorage) 24 | class FallbackNodeAttributes(object): 25 | """Attributes Node for testing 26 | """ 27 | 28 | 29 | @plumbing( 30 | MappingConstraints, 31 | Nodespaces, 32 | MappingAdopt, 33 | Attributes, 34 | DefaultInit, 35 | MappingNode, 36 | OdictStorage) 37 | class FallbackNode(object): 38 | """Normal Node for testing 39 | """ 40 | attributes_factory = FallbackNodeAttributes 41 | 42 | 43 | ############################################################################### 44 | # Tests 45 | ############################################################################### 46 | 47 | class TestFallback(NodeTestCase): 48 | 49 | def setUp(self): 50 | # Setup test data 51 | super(TestFallback, self).setUp() 52 | 53 | # Define a root node 54 | fb_node = self.fb_node = FallbackNode(name='root') 55 | 56 | # It has a fallback subtree defined 57 | fb_node.fallback_key = 'x' 58 | 59 | # The fallback subtree defines a fallback sub tree for itself. 
60 | # Note that attrs internally is also a tree! 61 | fb_node['x'] = FallbackNode() 62 | fb_node['x'].fallback_key = '1' 63 | 64 | # Define node without fallback, but with data 65 | fb_node['x']['1'] = FallbackNode() 66 | 67 | # An expected fallback value 68 | fb_node['x']['1'].attrs['a'] = 1 69 | 70 | # An unexpected fallback value. To make them better visible, they are 71 | # negative in this test 72 | fb_node['x']['1'].attrs['d'] = -3 73 | 74 | # Same on a second node for a different use case, where it find the 75 | # value on this level 76 | fb_node['x']['2'] = FallbackNode() 77 | fb_node['x']['2'].attrs['b'] = 2 78 | fb_node['x']['2'].attrs['d'] = -2 79 | 80 | # Define a second subtree 81 | fb_node['y'] = FallbackNode() 82 | 83 | # Here we have also a subtree which acts as fallback 84 | fb_node['y'].fallback_key = '1' 85 | 86 | # Again some data-only nodes in the subtree, still a fallback use case 87 | fb_node['y']['1'] = FallbackNode() 88 | fb_node['y']['1'].attrs['c'] = 3 89 | fb_node['y']['1'].attrs['d'] = -1 90 | 91 | # Define the node where our tests will look for the value 92 | fb_node['y']['2'] = FallbackNode() 93 | fb_node['y']['2'].attrs['d'] = 4 94 | 95 | def test_test_data(self): 96 | # Visualize the tree 97 | self.assertEqual(self.fb_node.treerepr(), ( 98 | ': root\n' 99 | ' : x\n' 100 | ' : 1\n' 101 | ' : 2\n' 102 | ' : y\n' 103 | ' : 1\n' 104 | ' : 2\n' 105 | )) 106 | 107 | def test_Fallback(self): 108 | # We always ask for attributes in the path 'root, y, 2, attrs'. 109 | # 'attrs' is in fact a nodespace '__attributes__', but internally its 110 | # handled like contained. See nodespaces for more info on it, this is 111 | # not fallback specific. 112 | 113 | # Case 1 - Directly ask for the key 'd' 114 | self.assertEqual(self.fb_node['y']['2'].attrs['d'], 4) 115 | 116 | # Case 2 - Ask for a key 'c' which does not exist in path. Now after 117 | # not finding it there it goes up one level to 'root, y'. 
Here it looks 118 | # if there is a fallback defined. There is one, its the subtree 119 | # 'root, y, 1'. Now it looks there relative in 'attrs' for 'c' and has 120 | # a hit. Value returned. 121 | self.assertEqual(self.fb_node['y']['2'].attrs['c'], 3) 122 | 123 | # Case 3 - Ask for a key 'b' which does not exist in the path. Now 124 | # after not finding it there it goes up one level to 'root, y'. Here it 125 | # looks if there is a fallback defined. There is one, its the subtree 126 | # 'root, y, 1'. It looks there relative in attrs for 'b' and it does 127 | # not exist. After not finding it there it goes up one level to 128 | # 'root, y'. It has a fallback, but that one was already visited. 129 | # Now it goes up another level on 'root' and looks if there is a 130 | # fallback defined. There is one, its the subtree 'root, x'. Now it 131 | # looks there relative for path '2, attrs, b' and has a hit. Value 132 | # returned. 133 | self.assertEqual(self.fb_node['y']['2'].attrs['b'], 2) 134 | 135 | # Case 4 - Ask for a key 'a' which does not exist in the path. Now 136 | # after not finding it there it goes up one level to 'root, y'. Here 137 | # it looks if there is a fallback defined. There is one, its the 138 | # subtree 'root, y, 1'. It looks there relative in attrs for a and it 139 | # does not exist. After not finding it there it goes up one level to 140 | # 'root, y'. It has a fallback, but that one was already visited. 141 | # Now it goes up another level on 'root' and looks if there is a 142 | # fallback defined. There is one, its the subtree 'root, x'. Now it 143 | # looks there relative for path '2, attrs, a' and it does not exist. 144 | # After not finding it there it goes up one level to 'root, x'. Here 145 | # it looks if there is a fallback defined. There is one, its the 146 | # subtree 'root, x, 1'. Now it looks there relative for path 147 | # 'attrs, a' and hit! Return value. 
148 | self.assertEqual(self.fb_node['y']['2'].attrs['a'], 1) 149 | 150 | # Case 5 - When there is no fallback defined. We ask for a key 'z' 151 | # which does not exist in the path. Now after not finding it there it 152 | # goes up one level to 'root, y'. Here it looks if there is a fallback 153 | # defined. There is one, its the subtree 'root, y, 1'. It looks there 154 | # relative in attrs for z and it does not exist. After not finding it 155 | # there it goes up one level to 'root, y'. It has a fallback, but that 156 | # one was already visited. Now it goes up another level on 'root' and 157 | # looks if there is a fallback defined. There is one, its the subtree 158 | # 'root, x'. Now it looks there relative for path '2, attrs, z' and it 159 | # does not exist. After not finding it there it goes up one level to 160 | # 'root, x'. Here it looks if there is a fallback defined. There is 161 | # one, its the subtree 'root, x, 1'. Now it looks there relative for 162 | # path 'attrs, z' and it does not exist. After not finding it there it 163 | # goes up one level to 'root'. It has a fallback, but that one was 164 | # already visited. Next parent is None. Exit. No value found. 
Raise 165 | # KeyError 166 | with self.assertRaises(KeyError) as arc: 167 | self.fb_node['y']['2'].attrs['z'] 168 | self.assertEqual(str(arc.exception), '\'z\'') 169 | -------------------------------------------------------------------------------- /src/node/tests/test_filter.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DictStorage 2 | from node.behaviors import ListStorage 3 | from node.behaviors import MappingFilter 4 | from node.behaviors import MappingNode 5 | from node.behaviors import NodeInit 6 | from node.behaviors import SequenceFilter 7 | from node.behaviors import SequenceNode 8 | from node.interfaces import IChildFilter 9 | from plumber import plumbing 10 | import unittest 11 | 12 | 13 | @plumbing( 14 | NodeInit, 15 | MappingNode, 16 | MappingFilter, 17 | DictStorage) 18 | class FilterMappingNode(object): 19 | pass 20 | 21 | 22 | @plumbing( 23 | NodeInit, 24 | SequenceNode, 25 | SequenceFilter, 26 | ListStorage) 27 | class FilterSequenceNode(object): 28 | pass 29 | 30 | 31 | class TestFilter(unittest.TestCase): 32 | 33 | def test_MappingFilter(self): 34 | node = FilterMappingNode() 35 | node_1 = node['1'] = FilterMappingNode() 36 | node_2 = node['2'] = FilterSequenceNode() 37 | 38 | self.assertEqual(node.filtered_children(FilterMappingNode), [node_1]) 39 | self.assertEqual( 40 | node.filtered_children(IChildFilter), 41 | [node_1, node_2] 42 | ) 43 | 44 | def test_SequenceFilter(self): 45 | node = FilterSequenceNode() 46 | node_1 = FilterMappingNode() 47 | node_2 = FilterSequenceNode() 48 | node.append(node_1) 49 | node.append(node_2) 50 | 51 | self.assertEqual(node.filtered_children(FilterMappingNode), [node_1]) 52 | self.assertEqual( 53 | node.filtered_children(IChildFilter), 54 | [node_1, node_2] 55 | ) 56 | -------------------------------------------------------------------------------- /src/node/tests/test_lifecycle.py: 
-------------------------------------------------------------------------------- 1 | from node.behaviors import Attributes 2 | from node.behaviors import AttributesLifecycle 3 | from node.behaviors import DefaultInit 4 | from node.behaviors import DictStorage 5 | from node.behaviors import Lifecycle 6 | from node.behaviors import MappingNode 7 | from node.behaviors import NodeAttributes 8 | from node.behaviors import Nodespaces 9 | from node.behaviors import suppress_lifecycle_events 10 | from node.events import NodeAddedEvent 11 | from node.events import NodeCreatedEvent 12 | from node.events import NodeDetachedEvent 13 | from node.events import NodeModifiedEvent 14 | from node.events import NodeRemovedEvent 15 | from node.interfaces import INode 16 | from node.interfaces import INodeAddedEvent 17 | from node.interfaces import INodeCreatedEvent 18 | from node.interfaces import INodeDetachedEvent 19 | from node.interfaces import INodeModifiedEvent 20 | from node.interfaces import INodeRemovedEvent 21 | from node.tests import NodeTestCase 22 | from plumber import plumbing 23 | import zope.component 24 | 25 | 26 | ############################################################################### 27 | # Mock objects 28 | ############################################################################### 29 | 30 | class Handler(object): 31 | handled = [] 32 | 33 | def __call__(self, obj, event): 34 | self.handled.append(event) 35 | 36 | def clear(self): 37 | self.handled = [] 38 | 39 | 40 | @plumbing( 41 | DefaultInit, 42 | MappingNode, 43 | DictStorage) 44 | class NoLifecycleNode(object): 45 | pass 46 | 47 | 48 | @plumbing(AttributesLifecycle) 49 | class LifecycleNodeAttributes(NodeAttributes): 50 | pass 51 | 52 | 53 | @plumbing( 54 | Nodespaces, 55 | Attributes, 56 | Lifecycle, 57 | DefaultInit, 58 | MappingNode, 59 | DictStorage) 60 | class LifecycleNode(object): 61 | attributes_factory = LifecycleNodeAttributes 62 | 63 | 64 | 
############################################################################### 65 | # Tests 66 | ############################################################################### 67 | 68 | class TestLifecycle(NodeTestCase): 69 | 70 | def setUp(self): 71 | super(TestLifecycle, self).setUp() 72 | handler = self.handler = Handler() 73 | zope.component.provideHandler(handler, [INode, INodeCreatedEvent]) 74 | zope.component.provideHandler(handler, [INode, INodeAddedEvent]) 75 | zope.component.provideHandler(handler, [INode, INodeModifiedEvent]) 76 | zope.component.provideHandler(handler, [INode, INodeRemovedEvent]) 77 | zope.component.provideHandler(handler, [INode, INodeDetachedEvent]) 78 | 79 | def test_NodeCreatedEvent(self): 80 | # Check NodeCreation 81 | self.handler.clear() 82 | 83 | NoLifecycleNode(name='no_notify') 84 | self.assertEqual(self.handler.handled, []) 85 | 86 | LifecycleNode(name='root') 87 | self.assertEqual(len(self.handler.handled), 1) 88 | self.assertTrue(isinstance(self.handler.handled[0], NodeCreatedEvent)) 89 | 90 | self.handler.clear() 91 | 92 | def test_NodeAddedEvent(self): 93 | # Check Node adding 94 | root = LifecycleNode(name='root') 95 | 96 | self.handler.clear() 97 | 98 | root['child1'] = LifecycleNode() 99 | self.assertEqual(len(self.handler.handled), 2) 100 | self.assertTrue(isinstance(self.handler.handled[0], NodeCreatedEvent)) 101 | self.assertTrue(isinstance(self.handler.handled[1], NodeAddedEvent)) 102 | 103 | self.handler.clear() 104 | 105 | def test_NodeModifiedEvent(self): 106 | # Check Node modification 107 | root = LifecycleNode(name='root') 108 | child = root['child'] = LifecycleNode() 109 | 110 | self.handler.clear() 111 | 112 | # No event, despite the node creation for the attributes nodespace 113 | attrs = child.attrs 114 | self.assertTrue(isinstance(attrs, LifecycleNodeAttributes)) 115 | self.assertEqual(len(self.handler.handled), 0) 116 | 117 | self.handler.clear() 118 | 119 | # Node modified events if the attributes 
nodespace is changed 120 | child.attrs['foo'] = 1 121 | self.assertEqual(len(self.handler.handled), 1) 122 | self.assertTrue(isinstance(self.handler.handled[0], NodeModifiedEvent)) 123 | 124 | self.handler.clear() 125 | 126 | del child.attrs['foo'] 127 | self.assertEqual(len(self.handler.handled), 1) 128 | self.assertTrue(isinstance(self.handler.handled[0], NodeModifiedEvent)) 129 | 130 | self.handler.clear() 131 | 132 | def test_NodeRemovedEvent(self): 133 | # Check Node Deletion 134 | root = LifecycleNode(name='root') 135 | root['child'] = LifecycleNode() 136 | 137 | self.handler.clear() 138 | 139 | del root['child'] 140 | self.assertEqual(len(self.handler.handled), 1) 141 | self.assertTrue(isinstance(self.handler.handled[0], NodeRemovedEvent)) 142 | 143 | self.handler.clear() 144 | 145 | def test_NodeDetachedEvent(self): 146 | # Check Node Detach 147 | root = LifecycleNode(name='root') 148 | root['child'] = LifecycleNode() 149 | 150 | self.handler.clear() 151 | 152 | root.detach('child') 153 | self.assertEqual(len(self.handler.handled), 1) 154 | self.assertTrue(isinstance(self.handler.handled[0], NodeDetachedEvent)) 155 | 156 | self.handler.clear() 157 | 158 | def test__notify_suppress(self): 159 | # Check notify suppress on ``__setitem__`` 160 | root = LifecycleNode(name='root') 161 | 162 | self.handler.clear() 163 | 164 | with suppress_lifecycle_events(): 165 | root['child'] = NoLifecycleNode() 166 | self.assertEqual(len(self.handler.handled), 0) 167 | 168 | # Check notify suppress on attributes manipulation 169 | attrs = root.attrs 170 | with suppress_lifecycle_events(): 171 | attrs['foo'] = 'foo' 172 | self.assertEqual(len(self.handler.handled), 0) 173 | 174 | with suppress_lifecycle_events(): 175 | del attrs['foo'] 176 | self.assertEqual(len(self.handler.handled), 0) 177 | -------------------------------------------------------------------------------- /src/node/tests/test_locking.py: 
-------------------------------------------------------------------------------- 1 | from node import locking 2 | from node.base import BaseNode 3 | from node.tests import patch 4 | from node.tests import unittest 5 | import threading 6 | 7 | 8 | class MockLock(object): 9 | 10 | def __init__(self): 11 | self._lock = threading.RLock() 12 | self.count = 0 13 | 14 | def acquire(self): 15 | self._lock.acquire() 16 | self.count += 1 17 | 18 | def release(self): 19 | self._lock.release() 20 | self.count -= 1 21 | 22 | 23 | class TestLocking(unittest.TestCase): 24 | 25 | @patch(locking, 'RLock', MockLock) 26 | def test_TreeLock(self): 27 | node = BaseNode() 28 | lock = locking.TreeLock(node) 29 | self.assertEqual(lock.lock.count, 0) 30 | lock.acquire() 31 | self.assertEqual(lock.lock.count, 1) 32 | lock.acquire() 33 | self.assertEqual(lock.lock.count, 2) 34 | lock.release() 35 | self.assertEqual(lock.lock.count, 1) 36 | lock.release() 37 | self.assertEqual(lock.lock.count, 0) 38 | 39 | @patch(locking, 'RLock', MockLock) 40 | def test_with_TreeLock(self): 41 | node = BaseNode() 42 | lock = locking.TreeLock(node) 43 | with lock: 44 | self.assertEqual(lock.lock.count, 1) 45 | self.assertEqual(lock.lock.count, 0) 46 | 47 | @patch(locking, 'RLock', MockLock) 48 | def test_locktree(self): 49 | testcase = self 50 | 51 | class LockingNode(BaseNode): 52 | @locking.locktree 53 | def locked(self): 54 | testcase.assertEqual(self.root._treelock.count, 1) 55 | 56 | node = LockingNode() 57 | self.assertFalse(hasattr(node.root, '_treelock')) 58 | node.locked() 59 | self.assertEqual(node.root._treelock.count, 0) 60 | -------------------------------------------------------------------------------- /src/node/tests/test_mapping.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DefaultInit 2 | from node.behaviors import DictStorage 3 | from node.behaviors import FullMapping 4 | from node.behaviors import MappingAdopt 5 | from 
node.behaviors import MappingNode as MappingNodeBehavior 6 | from node.behaviors import OdictStorage 7 | from node.interfaces import IContentishNode 8 | from node.interfaces import IMappingNode 9 | from node.interfaces import INode 10 | from node.testing import FullMappingTester 11 | from node.tests import NodeTestCase 12 | from plumber import plumbing 13 | from zope.interface import alsoProvides 14 | from zope.interface import Interface 15 | 16 | 17 | ############################################################################### 18 | # Mock objects 19 | ############################################################################### 20 | 21 | @plumbing(FullMapping) 22 | class FailingFullMapping(object): 23 | pass 24 | 25 | 26 | @plumbing( 27 | FullMapping, 28 | DictStorage) 29 | class SuccessFullMapping(object): 30 | pass 31 | 32 | 33 | @plumbing( 34 | MappingAdopt, 35 | DefaultInit, 36 | MappingNodeBehavior, 37 | OdictStorage) 38 | class MappingNode(object): 39 | pass 40 | 41 | 42 | class RootMappingNode(MappingNode): 43 | pass 44 | 45 | 46 | class INodeInterface(Interface): 47 | pass 48 | 49 | 50 | class INoInterface(Interface): 51 | pass 52 | 53 | 54 | ############################################################################### 55 | # Tests 56 | ############################################################################### 57 | 58 | class TestMapping(NodeTestCase): 59 | 60 | def test_fullmapping_fails(self): 61 | # A full mapping that is going to fail, because nobody takes care about 62 | # ``__delitem__``, ``__getitem__``, ``__iter__`` and ``__setitem__`` 63 | tester = FullMappingTester(FailingFullMapping, node_checks=False) 64 | tester.run() 65 | self.checkOutput("""\ 66 | ``__contains__``: failed: NotImplementedError() 67 | ``__delitem__``: failed: NotImplementedError() 68 | ``__getitem__``: failed: NotImplementedError() 69 | ``__iter__``: failed: NotImplementedError() 70 | ``__len__``: failed: NotImplementedError() 71 | ``__setitem__``: failed: 
NotImplementedError() 72 | ``clear``: failed: NotImplementedError() 73 | ``copy``: failed: NotImplementedError() 74 | ``get``: failed: NotImplementedError() 75 | ``has_key``: failed: NotImplementedError() 76 | ``items``: failed: NotImplementedError() 77 | ``iteritems``: failed: NotImplementedError() 78 | ``iterkeys``: failed: NotImplementedError() 79 | ``itervalues``: failed: NotImplementedError() 80 | ``keys``: failed: NotImplementedError() 81 | ``pop``: failed: NotImplementedError() 82 | ``popitem``: failed: NotImplementedError() 83 | ``setdefault``: failed: NotImplementedError() 84 | ``update``: failed: NotImplementedError() 85 | ``values``: failed: NotImplementedError() 86 | """, tester.combined) 87 | 88 | # All methods are defined on the class by the FullMapping behavior, 89 | # none are inherited from base classes 90 | self.checkOutput("""\ 91 | __contains__: FailingFullMapping 92 | __delitem__: FailingFullMapping 93 | __getitem__: FailingFullMapping 94 | __iter__: FailingFullMapping 95 | __len__: FailingFullMapping 96 | __setitem__: FailingFullMapping 97 | clear: FailingFullMapping 98 | copy: FailingFullMapping 99 | get: FailingFullMapping 100 | has_key: FailingFullMapping 101 | items: FailingFullMapping 102 | iteritems: FailingFullMapping 103 | iterkeys: FailingFullMapping 104 | itervalues: FailingFullMapping 105 | keys: FailingFullMapping 106 | pop: FailingFullMapping 107 | popitem: FailingFullMapping 108 | setdefault: FailingFullMapping 109 | update: FailingFullMapping 110 | values: FailingFullMapping 111 | """, tester.wherefrom) 112 | 113 | def test_fullmapping_success(self): 114 | # Use a storage 115 | tester = FullMappingTester(SuccessFullMapping, node_checks=False) 116 | tester.run() 117 | self.checkOutput("""\ 118 | ``__contains__``: OK 119 | ``__delitem__``: OK 120 | ``__getitem__``: OK 121 | ``__iter__``: OK 122 | ``__len__``: OK 123 | ``__setitem__``: OK 124 | ``clear``: OK 125 | ``copy``: OK 126 | ``get``: OK 127 | ``has_key``: OK 128 | 
``items``: OK 129 | ``iteritems``: OK 130 | ``iterkeys``: OK 131 | ``itervalues``: OK 132 | ``keys``: OK 133 | ``pop``: OK 134 | ``popitem``: OK 135 | ``setdefault``: OK 136 | ``update``: OK 137 | ``values``: OK 138 | """, tester.combined) 139 | 140 | # Only the Four were taken from the base class, the others were filled 141 | # in by the FullMapping behavior 142 | self.checkOutput("""\ 143 | __contains__: SuccessFullMapping 144 | __delitem__: SuccessFullMapping 145 | __getitem__: SuccessFullMapping 146 | __iter__: SuccessFullMapping 147 | __len__: SuccessFullMapping 148 | __setitem__: SuccessFullMapping 149 | clear: SuccessFullMapping 150 | copy: SuccessFullMapping 151 | get: SuccessFullMapping 152 | has_key: SuccessFullMapping 153 | items: SuccessFullMapping 154 | iteritems: SuccessFullMapping 155 | iterkeys: SuccessFullMapping 156 | itervalues: SuccessFullMapping 157 | keys: SuccessFullMapping 158 | pop: SuccessFullMapping 159 | popitem: SuccessFullMapping 160 | setdefault: SuccessFullMapping 161 | update: SuccessFullMapping 162 | values: SuccessFullMapping 163 | """, tester.wherefrom) 164 | 165 | def test_MappingNode(self): 166 | root = MappingNode(name='root') 167 | self.assertTrue(INode.providedBy(root)) 168 | self.assertTrue(IContentishNode.providedBy(root)) 169 | self.assertTrue(IMappingNode.providedBy(root)) 170 | 171 | root['child'] = MappingNode() 172 | self.assertEqual(root.name, 'root') 173 | self.assertEqual(root.parent, None) 174 | 175 | child = root['child'] 176 | self.assertEqual(child.name, 'child') 177 | self.assertEqual(child.parent, root) 178 | self.assertEqual(root.treerepr(), ( 179 | ': root\n' 180 | ' : child\n' 181 | )) 182 | self.assertTrue(bool(root)) 183 | 184 | tester = FullMappingTester(MappingNode) 185 | tester.run() 186 | self.assertEqual(tester.combined, ( 187 | '``__contains__``: OK\n' 188 | '``__delitem__``: OK\n' 189 | '``__getitem__``: OK\n' 190 | '``__iter__``: OK\n' 191 | '``__len__``: OK\n' 192 | '``__setitem__``: OK\n' 193 | 
'``clear``: OK\n' 194 | '``copy``: OK\n' 195 | '``get``: OK\n' 196 | '``has_key``: OK\n' 197 | '``items``: OK\n' 198 | '``iteritems``: OK\n' 199 | '``iterkeys``: OK\n' 200 | '``itervalues``: OK\n' 201 | '``keys``: OK\n' 202 | '``pop``: OK\n' 203 | '``popitem``: OK\n' 204 | '``setdefault``: OK\n' 205 | '``update``: OK\n' 206 | '``values``: OK' 207 | )) 208 | 209 | root = RootMappingNode(name='root') 210 | child = root['child'] = MappingNode() 211 | subchild = child['subchild'] = MappingNode() 212 | self.assertEqual(root.treerepr(), ( 213 | ': root\n' 214 | ' : child\n' 215 | ' : subchild\n' 216 | )) 217 | 218 | root[u'\xf6'] = MappingNode() 219 | self.checkOutput("""\ 220 | : root 221 | __: child 222 | ____: subchild 223 | __: ... 224 | """, root.treerepr(prefix='_')) 225 | 226 | self.checkOutput("""\ 227 | 228 | """, repr(root[u'\xf6'])) 229 | 230 | alsoProvides(child, INodeInterface) 231 | self.assertEqual(subchild.acquire(RootMappingNode), root) 232 | self.assertEqual(subchild.acquire(INodeInterface), child) 233 | self.assertEqual(subchild.acquire(INode), child) 234 | self.assertEqual(subchild.acquire(INoInterface), None) 235 | 236 | # detach 237 | self.assertEqual(child.name, 'child') 238 | self.assertEqual(child.parent, root) 239 | child = root.detach('child') 240 | self.assertEqual(child.name, 'child') 241 | self.assertEqual(child.parent, None) 242 | self.assertFalse('child' in root) 243 | self.checkOutput("""\ 244 | : child 245 | __: subchild 246 | """, child.treerepr(prefix='_')) 247 | 248 | def test_BC_imports(self): 249 | from node.behaviors import Nodify 250 | self.assertTrue(Nodify is MappingNodeBehavior) 251 | 252 | from node.interfaces import INodify 253 | self.assertTrue(INodify is IMappingNode) 254 | -------------------------------------------------------------------------------- /src/node/tests/test_node.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DefaultInit 2 | from 
node.behaviors import MappingNode 3 | from node.behaviors import Node 4 | from node.behaviors import NodeInit 5 | from node.interfaces import IDefaultInit 6 | from node.interfaces import INode 7 | from node.interfaces import INodeInit 8 | from node.tests import NodeTestCase 9 | from plumber import plumbing 10 | from zope.interface import Interface 11 | 12 | 13 | ############################################################################### 14 | # Mock objects 15 | ############################################################################### 16 | 17 | @plumbing(DefaultInit) 18 | class DefaultInitObject(object): 19 | pass 20 | 21 | 22 | @plumbing(NodeInit) 23 | class NodeInitObject(object): 24 | 25 | def __init__(self, foo, bar=None): 26 | self.foo = foo 27 | self.bar = bar 28 | 29 | 30 | @plumbing(DefaultInit, Node) 31 | class NodeObject(object): 32 | pass 33 | 34 | 35 | @plumbing(DefaultInit, Node, MappingNode) 36 | class BrokenMapping(object): 37 | 38 | def __iter__(self): 39 | yield 'child' 40 | 41 | 42 | class NoInterface(Interface): 43 | pass 44 | 45 | 46 | ############################################################################### 47 | # Tests 48 | ############################################################################### 49 | 50 | class TestNode(NodeTestCase): 51 | 52 | def test_DefaultInit(self): 53 | obj = DefaultInitObject(name='name', parent='parent') 54 | self.assertTrue(IDefaultInit.providedBy(obj)) 55 | self.assertEqual(obj.__name__, 'name') 56 | self.assertEqual(obj.__parent__, 'parent') 57 | 58 | def test_NodeInit(self): 59 | obj = NodeInitObject('foo', name='name', parent='parent', bar='bar') 60 | self.assertTrue(INodeInit.providedBy(obj)) 61 | self.assertEqual(obj.__name__, 'name') 62 | self.assertEqual(obj.__parent__, 'parent') 63 | self.assertEqual(obj.foo, 'foo') 64 | self.assertEqual(obj.bar, 'bar') 65 | 66 | def test_Node(self): 67 | parent = NodeObject(name='parent') 68 | node = NodeObject(name='node', parent=parent) 69 | 70 | # 
interface 71 | self.assertTrue(INode.providedBy(node)) 72 | 73 | # __name__ 74 | self.assertEqual(node.__name__, 'node') 75 | self.assertEqual(node.name, 'node') 76 | with self.assertRaises(AttributeError): 77 | node.name = '' 78 | 79 | # __parent__ 80 | self.assertEqual(node.__parent__, parent) 81 | self.assertEqual(node.parent, parent) 82 | with self.assertRaises(AttributeError): 83 | node.parent = None 84 | 85 | # path 86 | self.assertEqual(node.path, ['parent', 'node']) 87 | 88 | # root 89 | self.assertEqual(node.root, parent) 90 | 91 | # acquire 92 | self.assertEqual(node.acquire(NoInterface), None) 93 | self.assertEqual(node.acquire(INode), parent) 94 | self.assertEqual(node.acquire(NodeObject), parent) 95 | 96 | # __nonzero__, __bool__ 97 | self.assertTrue(bool(node)) 98 | 99 | # __repr__, __str__ 100 | self.checkOutput(""" 101 | 102 | """, str(node)) 103 | 104 | # noderepr 105 | self.assertEqual( 106 | node.noderepr, 107 | ": node" 108 | ) 109 | 110 | # treerepr 111 | broken = BrokenMapping(name='broken') 112 | self.checkOutput(""" 113 | : broken 114 | __child: 'NotImplementedError()' 115 | """, broken.treerepr(prefix='_')) 116 | -------------------------------------------------------------------------------- /src/node/tests/test_nodespace.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DefaultInit 2 | from node.behaviors import MappingAdopt 3 | from node.behaviors import MappingNode 4 | from node.behaviors import Nodespaces 5 | from node.behaviors import OdictStorage 6 | from node.tests import NodeTestCase 7 | from odict import odict 8 | from plumber import plumbing 9 | 10 | 11 | ############################################################################### 12 | # Mock objects 13 | ############################################################################### 14 | 15 | @plumbing( 16 | MappingAdopt, 17 | Nodespaces, 18 | MappingNode, 19 | OdictStorage) 20 | class NodespacesNode(odict): 21 
| pass 22 | 23 | 24 | @plumbing( 25 | MappingAdopt, 26 | MappingNode, 27 | DefaultInit, 28 | OdictStorage) 29 | class SomeNode(object): 30 | pass 31 | 32 | 33 | ############################################################################### 34 | # Tests 35 | ############################################################################### 36 | 37 | class TestNodespace(NodeTestCase): 38 | 39 | def test_Nodespaces(self): 40 | node = NodespacesNode() 41 | self.assertTrue(isinstance(node.nodespaces, odict)) 42 | self.assertEqual(node.nodespaces['__children__'], node) 43 | 44 | child = node['__children__']['child'] = SomeNode() 45 | self.assertEqual(node['child'], child) 46 | 47 | self.assertTrue(node['__children__']['child'] is node['child']) 48 | 49 | foo = node['__foo__'] = SomeNode() 50 | self.assertEqual(node['__foo__'], foo) 51 | 52 | child = node['__foo__']['child'] = SomeNode() 53 | self.assertEqual(node['__foo__']['child'], child) 54 | 55 | self.assertFalse(node['__foo__']['child'] is node['child']) 56 | 57 | self.assertEqual(len(node.nodespaces), 2) 58 | self.assertEqual(node.nodespaces['__children__'], node) 59 | self.assertEqual(node.nodespaces['__foo__'], foo) 60 | 61 | with self.assertRaises(KeyError) as arc: 62 | node['__inexistent__'] 63 | self.assertEqual(str(arc.exception), '\'__inexistent__\'') 64 | 65 | with self.assertRaises(KeyError) as arc: 66 | node['inexistent'] 67 | self.assertEqual(str(arc.exception), '\'inexistent\'') 68 | 69 | del node['child'] 70 | self.assertEqual(node.keys(), []) 71 | 72 | self.assertEqual(list(node['__foo__'].keys()), ['child']) 73 | 74 | del node['__foo__'] 75 | self.assertEqual(len(node.nodespaces), 1) 76 | 77 | self.assertEqual(list(node.nodespaces.keys()), ['__children__']) 78 | -------------------------------------------------------------------------------- /src/node/tests/test_sequence.py: -------------------------------------------------------------------------------- 1 | from node.base import BaseNode 2 | from 
node.behaviors import ListStorage 3 | from node.behaviors import MutableSequence 4 | from node.behaviors import Sequence 5 | from node.behaviors import SequenceNode as SequenceNodeBehavior 6 | from node.interfaces import IContentishNode 7 | from node.interfaces import IMappingNode 8 | from node.interfaces import INode 9 | from node.interfaces import ISequenceNode 10 | from node.tests import NodeTestCase 11 | from plumber import plumbing 12 | from zope.interface import Interface 13 | 14 | 15 | class TestSequence(NodeTestCase): 16 | 17 | def test_Sequence(self): 18 | @plumbing(Sequence) 19 | class AbstractTestSequence(object): 20 | pass 21 | 22 | seq = AbstractTestSequence() 23 | 24 | # __len__ 25 | with self.assertRaises(NotImplementedError): 26 | len(seq) 27 | 28 | # __getitem__ 29 | with self.assertRaises(NotImplementedError): 30 | seq[0] 31 | 32 | @plumbing(Sequence) 33 | class TestSequence(object): 34 | def __init__(self, data): 35 | self.data = data 36 | 37 | def __len__(self): 38 | return len(self.data) 39 | 40 | def __getitem__(self, index): 41 | return self.data[index] 42 | 43 | seq = TestSequence([1, 2, 3]) 44 | 45 | # __len__ 46 | self.assertEqual(len(seq), 3) 47 | 48 | # __getitem__ 49 | self.assertEqual(seq[0], 1) 50 | with self.assertRaises(IndexError): 51 | seq[3] 52 | 53 | # __contains__ 54 | self.assertTrue(1 in seq) 55 | self.assertFalse(4 in seq) 56 | 57 | # __iter__ 58 | self.assertEqual(list(iter(seq)), [1, 2, 3]) 59 | 60 | # __reversed__ 61 | self.assertEqual(list(reversed(seq)), [3, 2, 1]) 62 | 63 | # count 64 | self.assertEqual(seq.count(1), 1) 65 | self.assertEqual(seq.count(4), 0) 66 | 67 | # index 68 | self.assertEqual(seq.index(2), 1) 69 | with self.assertRaises(ValueError): 70 | seq.index(4) 71 | 72 | def test_MutableSequence(self): 73 | @plumbing(MutableSequence) 74 | class AbstractTestMutableSequence(object): 75 | pass 76 | 77 | mseq = AbstractTestMutableSequence() 78 | 79 | # __setitem__ 80 | with 
self.assertRaises(NotImplementedError): 81 | mseq[0] = 0 82 | 83 | # __delitem__ 84 | with self.assertRaises(NotImplementedError): 85 | del mseq[0] 86 | 87 | # insert 88 | with self.assertRaises(NotImplementedError): 89 | mseq.insert(0, 0) 90 | 91 | @plumbing(MutableSequence) 92 | class TestMutableSequence(object): 93 | def __init__(self, data): 94 | self.data = data 95 | 96 | def __len__(self): 97 | return len(self.data) 98 | 99 | def __getitem__(self, index): 100 | return self.data[index] 101 | 102 | def __setitem__(self, index, value): 103 | self.data[index] = value 104 | 105 | def __delitem__(self, index): 106 | del self.data[index] 107 | 108 | def insert(self, index, value): 109 | self.data.insert(index, value) 110 | 111 | mseq = TestMutableSequence([1, 2, 3]) 112 | 113 | # __setitem__ 114 | mseq[2] = 4 115 | self.assertEqual(mseq.data, [1, 2, 4]) 116 | 117 | # __delitem__ 118 | del mseq[2] 119 | self.assertEqual(mseq.data, [1, 2]) 120 | 121 | # insert 122 | mseq.insert(2, 3) 123 | self.assertEqual(mseq.data, [1, 2, 3]) 124 | 125 | # __iadd__ 126 | mseq += [4] 127 | self.assertEqual(mseq.data, [1, 2, 3, 4]) 128 | 129 | # append 130 | mseq.append(5) 131 | self.assertEqual(mseq.data, [1, 2, 3, 4, 5]) 132 | 133 | # extend 134 | mseq.extend([6, 7]) 135 | self.assertEqual(mseq.data, [1, 2, 3, 4, 5, 6, 7]) 136 | 137 | # pop 138 | value = mseq.pop() 139 | self.assertEqual(value, 7) 140 | self.assertEqual(mseq.data, [1, 2, 3, 4, 5, 6]) 141 | 142 | # remove 143 | mseq.remove(6) 144 | self.assertEqual(mseq.data, [1, 2, 3, 4, 5]) 145 | 146 | # reverse 147 | mseq.reverse() 148 | self.assertEqual(mseq.data, [5, 4, 3, 2, 1]) 149 | 150 | # clear 151 | mseq.clear() 152 | self.assertEqual(mseq.data, []) 153 | 154 | def test_SequenceNode(self): 155 | @plumbing(SequenceNodeBehavior, ListStorage) 156 | class SequenceNode(object): 157 | pass 158 | 159 | root = BaseNode() 160 | 161 | node = root['seq'] = SequenceNode() 162 | self.assertTrue(INode.providedBy(node)) 163 | 
self.assertTrue(IContentishNode.providedBy(node)) 164 | self.assertTrue(ISequenceNode.providedBy(node)) 165 | 166 | # __name__ 167 | self.assertEqual(node.name, 'seq') 168 | 169 | # __parent__ 170 | self.assertEqual(node.parent, root) 171 | 172 | # path 173 | self.assertEqual(node.path, [None, 'seq']) 174 | 175 | # root 176 | self.assertEqual(node.root, root) 177 | 178 | # acquire 179 | class INoInterface(Interface): 180 | pass 181 | 182 | self.assertEqual(node.acquire(BaseNode), root) 183 | self.assertEqual(node.acquire(IMappingNode), root) 184 | self.assertEqual(node.acquire(INoInterface), None) 185 | 186 | # detach 187 | child_0 = BaseNode() 188 | node.insert('0', child_0) 189 | child_1 = BaseNode() 190 | node.insert('1', child_1) 191 | self.assertTrue(child_0 in node) 192 | node.detach('0') 193 | self.assertFalse(child_0 in node) 194 | self.assertEqual(child_0.parent, None) 195 | self.assertEqual(child_1.name, '0') 196 | del node[:] 197 | 198 | # __index__ 199 | with self.assertRaises(IndexError): 200 | node.__index__() 201 | child_0 = SequenceNode() 202 | node.insert(0, child_0) 203 | self.assertEqual(child_0.__index__(), 0) 204 | 205 | # __getitem__ 206 | child_1 = BaseNode() 207 | node.insert(1, child_1) 208 | child_2 = BaseNode() 209 | node.insert(2, child_2) 210 | self.assertEqual(node[0], child_0) 211 | self.assertEqual(node['0'], child_0) 212 | self.assertEqual(node[:2], [child_0, child_1]) 213 | self.assertEqual(node[1:], [child_1, child_2]) 214 | 215 | # __setitem__ 216 | node[2] = BaseNode() 217 | node['2'] = BaseNode() 218 | self.assertFalse(node[2] is child_2) 219 | 220 | # __delitem__ 221 | child_2 = node['2'] 222 | del node[1] 223 | self.assertEqual(node[:], [child_0, child_2]) 224 | 225 | # insert 226 | child_1 = BaseNode() 227 | node.insert(1, child_1) 228 | self.assertEqual(node[:], [child_0, child_1, child_2]) 229 | 230 | # printtree 231 | self.checkOutput(""" 232 | : None 233 | __: seq 234 | ____: 0 235 | ____: 1 236 | ____: 2 237 | """, 
root.treerepr(prefix='_')) 238 | -------------------------------------------------------------------------------- /src/node/tests/test_storage.py: -------------------------------------------------------------------------------- 1 | from node.behaviors import DictStorage 2 | from node.behaviors import ListStorage 3 | from node.behaviors import MappingStorage 4 | from node.behaviors import OdictStorage 5 | from node.behaviors import SequenceStorage 6 | from node.interfaces import IMappingStorage 7 | from node.tests import NodeTestCase 8 | from odict import odict 9 | from plumber import plumbing 10 | 11 | 12 | ############################################################################### 13 | # Mock objects 14 | ############################################################################### 15 | 16 | @plumbing(MappingStorage) 17 | class MappingStorageObject(object): 18 | pass 19 | 20 | 21 | @plumbing(DictStorage) 22 | class DictStorageObject(object): 23 | pass 24 | 25 | 26 | @plumbing(OdictStorage) 27 | class OdictStorageObject(object): 28 | pass 29 | 30 | 31 | @plumbing(SequenceStorage) 32 | class SequenceStorageObject(object): 33 | pass 34 | 35 | 36 | @plumbing(ListStorage) 37 | class ListStorageObject(object): 38 | pass 39 | 40 | 41 | ############################################################################### 42 | # Tests 43 | ############################################################################### 44 | 45 | class TestStorage(NodeTestCase): 46 | 47 | def test_MappingStorage(self): 48 | obj = MappingStorageObject() 49 | self.assertTrue(IMappingStorage.providedBy(obj)) 50 | 51 | with self.assertRaises(NotImplementedError) as arc: 52 | obj.storage 53 | expected = 'Abstract ``MappingStorage`` does not implement ``storage``' 54 | self.assertEqual(str(arc.exception), expected) 55 | 56 | def test_DictStorage(self): 57 | obj = DictStorageObject() 58 | self.assertEqual(obj.storage, {}) 59 | 60 | obj['foo'] = 'foo' 61 | self.assertEqual(obj.storage, {'foo': 
'foo'}) 62 | self.assertEqual(obj['foo'], 'foo') 63 | self.assertEqual([key for key in obj], ['foo']) 64 | 65 | del obj['foo'] 66 | self.assertEqual(obj.storage, {}) 67 | 68 | def test_OdictStorage(self): 69 | obj = OdictStorageObject() 70 | self.assertEqual(obj.storage, odict()) 71 | 72 | obj['foo'] = 'foo' 73 | self.assertEqual(obj.storage, odict([('foo', 'foo')])) 74 | self.assertEqual(obj['foo'], 'foo') 75 | self.assertEqual([key for key in obj], ['foo']) 76 | 77 | del obj['foo'] 78 | self.assertEqual(obj.storage, odict()) 79 | 80 | def test_SequenceStorage(self): 81 | obj = SequenceStorageObject() 82 | 83 | with self.assertRaises(NotImplementedError) as arc: 84 | obj.storage 85 | expected = 'Abstract ``SequenceStorage`` does not implement ``storage``' 86 | self.assertEqual(str(arc.exception), expected) 87 | 88 | def test_ListStorage(self): 89 | lseq = ListStorageObject() 90 | self.assertEqual(lseq.storage, []) 91 | 92 | # insert 93 | lseq.insert(0, 0) 94 | self.assertEqual(lseq.storage, [0]) 95 | 96 | # __setitem__ 97 | lseq[0] = 1 98 | self.assertEqual(lseq.storage, [1]) 99 | 100 | # __len__ 101 | self.assertEqual(len(lseq), 1) 102 | 103 | # __getitem__ 104 | self.assertEqual(lseq[0], 1) 105 | with self.assertRaises(IndexError): 106 | lseq[1] 107 | 108 | # __delitem__ 109 | del lseq[0] 110 | self.assertEqual(lseq.storage, []) 111 | with self.assertRaises(IndexError): 112 | del lseq[0] 113 | 114 | def test_BC_imports(self): 115 | from node.behaviors import Storage 116 | self.assertTrue(Storage is MappingStorage) 117 | 118 | from node.interfaces import IStorage 119 | self.assertTrue(IStorage is IMappingStorage) 120 | -------------------------------------------------------------------------------- /src/node/tests/test_tests.py: -------------------------------------------------------------------------------- 1 | from node.compat import IS_PY2 2 | from node.testing import env 3 | from node.tests import Failure 4 | from node.tests import NodeTestCase 5 | from 
node.tests import patch 6 | from node.tests import unittest 7 | 8 | 9 | ############################################################################### 10 | # Mock objects 11 | ############################################################################### 12 | 13 | class PatchedMockupNode(object): 14 | pass 15 | 16 | 17 | class PatchedNoNode(object): 18 | pass 19 | 20 | 21 | ############################################################################### 22 | # Tests 23 | ############################################################################### 24 | 25 | class TestPatch(unittest.TestCase): 26 | 27 | @patch(env, 'MockupNode', PatchedMockupNode) 28 | def test_patch(self): 29 | self.assertEqual(env.MockupNode, PatchedMockupNode) 30 | 31 | @patch(env, 'MockupNode', PatchedMockupNode) 32 | @patch(env, 'NoNode', PatchedNoNode) 33 | def test_multi_patch(self): 34 | self.assertEqual(env.MockupNode, PatchedMockupNode) 35 | self.assertEqual(env.NoNode, PatchedNoNode) 36 | 37 | def test_patched_raises(self): 38 | def raises(): 39 | raise Exception() 40 | 41 | @patch(env, 'MockupNode', PatchedMockupNode) 42 | @patch(env, 'NoNode', PatchedNoNode) 43 | def test_raises(): 44 | self.assertEqual(env.MockupNode.__name__, 'PatchedMockupNode') 45 | self.assertEqual(env.NoNode.__name__, 'PatchedNoNode') 46 | raises() 47 | 48 | self.assertRaises(Exception, test_raises) 49 | self.assertEqual(env.MockupNode.__name__, 'MockupNode') 50 | self.assertEqual(env.NoNode.__name__, 'NoNode') 51 | 52 | 53 | class TestNodeTestCase(NodeTestCase): 54 | 55 | def test_expectError(self): 56 | def func_raises(): 57 | raise Exception('Function raises') 58 | err = self.expectError(Exception, func_raises) 59 | self.assertEqual(str(err), 'Function raises') 60 | 61 | def func_passes(): 62 | pass 63 | err = None 64 | try: 65 | self.expectError(Exception, func_passes) 66 | except Exception as e: 67 | err = e 68 | finally: 69 | expected = ( 70 | 'Expected \'\' when calling' 71 | ' \'\' when calling 
\'.func_passes at' 75 | ) 76 | self.assertTrue(str(err).startswith(expected)) 77 | 78 | def test_checkOutput(self): 79 | want = '...Hello...' 80 | got = 'Leading Hello Trailing' 81 | self.checkOutput(want, got) 82 | want = 'Hello' 83 | with self.assertRaises(Failure) as arc: 84 | self.checkOutput(want, got) 85 | self.assertEqual(str(arc.exception).split('\n'), [ 86 | 'Expected:', 87 | ' Hello', 88 | 'Got:', 89 | ' Leading Hello Trailing' 90 | ]) 91 | -------------------------------------------------------------------------------- /src/node/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from node.base import BaseNode 3 | from node.tests import NodeTestCase 4 | from node.utils import AttributeAccess 5 | from node.utils import debug 6 | from node.utils import decode 7 | from node.utils import encode 8 | from node.utils import instance_property 9 | from node.utils import logger 10 | from node.utils import node_by_path 11 | from node.utils import ReverseMapping 12 | from node.utils import safe_decode 13 | from node.utils import safe_encode 14 | from node.utils import StrCodec 15 | from node.utils import UNSET 16 | from node.utils import Unset 17 | from odict import odict 18 | import copy 19 | import logging 20 | import pickle 21 | 22 | 23 | class TestUtils(NodeTestCase): 24 | 25 | def test_UNSET(self): 26 | self.assertEqual(repr(UNSET), '') 27 | self.assertEqual(str(UNSET), '') 28 | self.assertFalse(bool(UNSET)) 29 | self.assertEqual(len(UNSET), 0) 30 | self.assertTrue(copy.copy(UNSET) is UNSET) 31 | self.assertTrue(copy.deepcopy(UNSET) is UNSET) 32 | self.assertFalse(UNSET < UNSET) 33 | self.assertFalse(UNSET <= UNSET) 34 | self.assertFalse(UNSET > UNSET) 35 | self.assertFalse(UNSET >= UNSET) 36 | self.assertTrue(Unset() is UNSET) 37 | self.assertTrue(pickle.loads(pickle.dumps(UNSET)) is UNSET) 38 | 39 | def test_ReverseMapping(self): 40 | context = odict([ 41 | ('foo', 'a'), 42 | 
('bar', 'b') 43 | ]) 44 | mapping = ReverseMapping(context) 45 | self.assertEqual([v for v in mapping], ['a', 'b']) 46 | self.assertEqual(mapping.keys(), ['a', 'b']) 47 | self.assertEqual(mapping.values(), ['foo', 'bar']) 48 | self.assertEqual(mapping.items(), [('a', 'foo'), ('b', 'bar')]) 49 | self.assertEqual(len(mapping), 2) 50 | self.assertTrue('a' in mapping) 51 | self.assertFalse('foo' in mapping) 52 | self.assertEqual(mapping['a'], 'foo') 53 | with self.assertRaises(KeyError) as arc: 54 | mapping['foo'] 55 | self.assertEqual(str(arc.exception), '\'foo\'') 56 | self.assertEqual(mapping.get('b'), 'bar') 57 | self.assertEqual(mapping.get('foo', 'DEFAULT'), 'DEFAULT') 58 | 59 | def test_AttributeAccess(self): 60 | context = odict([ 61 | ('foo', 'a'), 62 | ('bar', 'b') 63 | ]) 64 | attraccess = AttributeAccess(context) 65 | self.assertEqual(attraccess.foo, 'a') 66 | with self.assertRaises(AttributeError) as arc: 67 | attraccess.a 68 | self.assertEqual(str(arc.exception), 'a') 69 | attraccess.foo = 'foo' 70 | self.assertEqual(attraccess.foo, 'foo') 71 | self.assertEqual(attraccess['foo'], 'foo') 72 | attraccess['baz'] = 'bla' 73 | self.assertEqual(attraccess.baz, 'bla') 74 | del attraccess['bar'] 75 | self.assertEqual( 76 | object.__getattribute__(attraccess, 'context').keys(), 77 | ['foo', 'baz'] 78 | ) 79 | attraccess.x = 0 80 | self.assertEqual( 81 | object.__getattribute__(attraccess, 'context').keys(), 82 | ['foo', 'baz', 'x'] 83 | ) 84 | 85 | def test_encode(self): 86 | self.assertEqual( 87 | encode( 88 | b'\x01\x05\x00\x00\x00\x00\x00\x05\x15\x00\x00\x00\xd4' 89 | b'\xa0\xff\xff\xaeW\x82\xa9P\xcf8\xaf&\x0e\x00\x00' 90 | ), ( 91 | b'\x01\x05\x00\x00\x00\x00\x00\x05\x15\x00\x00\x00\xd4' 92 | b'\xa0\xff\xff\xaeW\x82\xa9P\xcf8\xaf&\x0e\x00\x00' 93 | ) 94 | ) 95 | self.assertEqual(encode(u'\xe4'), b'\xc3\xa4') 96 | self.assertEqual(encode([u'\xe4']), [b'\xc3\xa4']) 97 | self.assertEqual( 98 | encode({u'\xe4': u'\xe4'}), 99 | {b'\xc3\xa4': b'\xc3\xa4'} 100 | ) 
101 | self.assertEqual(encode(b'\xc3\xa4'), b'\xc3\xa4') 102 | 103 | node = BaseNode() 104 | node.child_constraints = None 105 | node['foo'] = u'\xe4' 106 | self.assertEqual(encode(node), {b'foo': b'\xc3\xa4'}) 107 | 108 | def test_decode(self): 109 | self.assertEqual(decode(b'foo'), u'foo') 110 | self.assertEqual(decode((b'foo', u'bar')), (u'foo', u'bar')) 111 | self.assertEqual(decode({b'foo': b'bar'}), {u'foo': u'bar'}) 112 | self.assertEqual(decode(b'fo\xe4'), b'fo\xe4') 113 | 114 | node = BaseNode() 115 | node.child_constraints = None 116 | node[b'foo'] = b'\xc3\xa4' 117 | self.assertEqual(decode(node), {u'foo': u'\xe4'}) 118 | 119 | def test_StrCodec(self): 120 | codec = StrCodec(soft=False) 121 | with self.assertRaises(UnicodeDecodeError) as arc: 122 | codec.decode(b'fo\xe4') 123 | expected = ( 124 | 'codec can\'t decode byte 0xe4 in position 2: ' 125 | 'unexpected end of data' 126 | ) 127 | self.assertTrue(str(arc.exception).find(expected) > -1) 128 | 129 | def test_safe_encode(self): 130 | self.assertEqual(safe_encode(u'äöü'), b'\xc3\xa4\xc3\xb6\xc3\xbc') 131 | self.assertEqual(safe_encode(b'already_string'), b'already_string') 132 | 133 | def test_safe_decode(self): 134 | self.assertEqual(safe_decode(b'\xc3\xa4\xc3\xb6\xc3\xbc'), u'äöü') 135 | self.assertEqual(safe_decode(u'already_unicode'), u'already_unicode') 136 | 137 | def test_instance_property(self): 138 | computed = list() 139 | 140 | class InstancePropertyTest(object): 141 | 142 | @instance_property 143 | def property(self): 144 | computed.append('Computed') 145 | return 'value' 146 | 147 | obj = InstancePropertyTest() 148 | with self.assertRaises(AttributeError) as arc: 149 | obj._property 150 | expected = ( 151 | '\'InstancePropertyTest\' object has no attribute \'_property\'' 152 | ) 153 | self.assertEqual(str(arc.exception), expected) 154 | 155 | self.assertEqual(obj.property, 'value') 156 | self.assertEqual(computed, ['Computed']) 157 | computed = list() 158 | 159 | 
self.assertEqual(obj._property, 'value') 160 | 161 | self.assertEqual(obj.property, 'value') 162 | self.assertEqual(computed, []) 163 | 164 | def test_node_by_path(self): 165 | root = BaseNode(name='root') 166 | 167 | child = root['child'] = BaseNode() 168 | sub = child['sub'] = BaseNode() 169 | 170 | self.assertEqual(node_by_path(root, ''), root) 171 | self.assertEqual(node_by_path(root, '/'), root) 172 | self.assertEqual(node_by_path(root, []), root) 173 | 174 | self.assertEqual(node_by_path(root, 'child'), child) 175 | self.assertEqual(node_by_path(root, '/child'), child) 176 | 177 | self.assertEqual(node_by_path(root, 'child/sub'), sub) 178 | 179 | self.assertEqual(node_by_path(root, ['child']), child) 180 | 181 | self.assertEqual(node_by_path(root, ['child', 'sub']), sub) 182 | 183 | class CustomPathIterator(object): 184 | def __iter__(self): 185 | yield 'child' 186 | yield 'sub' 187 | 188 | self.assertEqual(node_by_path(root, CustomPathIterator()), sub) 189 | 190 | def test_debug_helper(self): 191 | messages = list() 192 | 193 | class TestHandler(logging.StreamHandler): 194 | def handle(self, record): 195 | messages.append(str(record)) 196 | 197 | handler = TestHandler() 198 | logger.addHandler(handler) 199 | logger.setLevel(logging.DEBUG) 200 | 201 | @debug 202 | def test_search(a, b=42): 203 | pass 204 | 205 | test_search(21) 206 | 207 | self.assertTrue(str(messages[0]).find('LogRecord: node, 10,') > -1) 208 | self.assertTrue(str(messages[0]).find('utils.py') > -1) 209 | self.assertTrue(str(messages[0]).find('"test_search: args=(21,), kws={}">') > -1) 210 | 211 | self.assertTrue(str(messages[1]).find('LogRecord: node, 10,') > -1) 212 | self.assertTrue(str(messages[1]).find('utils.py') > -1) 213 | self.assertTrue(str(messages[1]).find('"test_search: --> None">') > -1) 214 | 215 | logger.setLevel(logging.INFO) 216 | logger.removeHandler(handler) 217 | -------------------------------------------------------------------------------- /src/node/utils.py: 
--------------------------------------------------------------------------------
from node.compat import func_name
from node.compat import iteritems
from node.compat import STR_TYPE
from node.compat import UNICODE_TYPE
from node.interfaces import IAttributeAccess
from node.interfaces import INode
from zope.interface import implementer
from zope.interface.common.mapping import IEnumerableMapping
import logging


# package-wide logger, also used by the ``debug`` decorator below
logger = logging.getLogger('node')


class Unset(object):
    """Identify unset values in contrast to None.

    ``Unset`` is a falsy singleton: every instantiation returns the same
    object, so identity checks (``value is UNSET``) are reliable.
    """
    # holds the singleton instance once created
    instance = None

    def __new__(cls):
        # enforce the singleton: create the one instance lazily, then
        # always hand out the same object
        if cls.instance is None:
            cls.instance = object.__new__(cls)
        return cls.instance

    def __nonzero__(self):
        # Python 2 truth protocol; UNSET is always falsy
        return False

    # Python 3 truth protocol
    __bool__ = __nonzero__

    def __str__(self):
        return ''

    def __len__(self):
        return 0

    def __repr__(self):
        # NOTE(review): repr is the empty string, same as __str__ —
        # possibly intended to be a marker like '<UNSET>'; verify.
        return ''

    def __copy__(self):
        # copying the singleton yields the singleton itself
        return self

    def __deepcopy__(self, memo):
        # deep-copying the singleton yields the singleton itself
        return self

    # all rich comparisons return False: UNSET never orders relative to
    # anything, including itself
    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False


# the canonical unset marker; compare with ``is``
UNSET = Unset()


def LocationIterator(obj):
    """Iterate over an object and all of its parents.

    Walks up the ``__parent__`` chain, yielding ``obj`` first and stopping
    once a parent is None (or missing).

    Copied from ``zope.location.LocationIterator``.
    """
    while obj is not None:
        yield obj
        obj = getattr(obj, '__parent__', None)


@implementer(IEnumerableMapping)
class ReverseMapping(object):
    """Reversed IEnumerableMapping.

    Adapts a dict-like object so that lookups go value -> key. Lookups are
    O(n) scans over the context; if several keys share a value, the first
    matching key in iteration order wins.
    """

    def __init__(self, context):
        """Object behaves as adapter for dict like object.

        :param context: a dict like object.
        """
        self.context = context

    def __getitem__(self, value):
        # scan for the first key whose value equals ``value``
        for key in self.context:
            if self.context[key] == value:
                return key
        raise KeyError(value)

    def get(self, value, default=None):
        """Return key for ``value``, or ``default`` if not found."""
        try:
            return self[value]
        except KeyError:
            return default

    def __contains__(self, value):
        # membership is over the context's *values*
        for key in self.context:
            val = self.context[key]
            if val == value:
                return True
        return False

    def keys(self):
        # keys of the reversed mapping are the context's values
        return [val for val in self]

    def __iter__(self):
        # iterate the context's values (they act as keys here)
        for key in self.context:
            yield self.context[key]

    def values(self):
        # values of the reversed mapping are the context's keys
        return [key for key in self.context]

    def items(self):
        # (value, key) pairs of the underlying context
        return [(v, k) for k, v in self.context.items()]

    def __len__(self):
        return len(self.context)


@implementer(IAttributeAccess)
class AttributeAccess(object):
    """Expose a dict-like context via attribute access.

    Attribute and item access both read from/write to the wrapped context.
    If someone really needs to access the original context (which should
    not happen), she has to use ``object.__getattr__(attraccess, 'context')``.
    """

    def __init__(self, context):
        # bypass our own __setattr__ so 'context' lands on the instance
        # instead of being written into the context mapping
        object.__setattr__(self, 'context', context)

    def __getattr__(self, name):
        context = object.__getattribute__(self, 'context')
        try:
            return context[name]
        except KeyError:
            # translate missing keys into the attribute protocol's error
            raise AttributeError(name)

    def __setattr__(self, name, value):
        # every attribute write goes into the context mapping
        context = object.__getattribute__(self, 'context')
        context[name] = value

    def __getitem__(self, name):
        context = object.__getattribute__(self, 'context')
        return context[name]

    def __setitem__(self, name, value):
        context = object.__getattribute__(self, 'context')
        context[name] = value

    def __delitem__(self, name):
        context = object.__getattribute__(self, 'context')
        del context[name]


# default character encoding used by StrCodec and the safe_* helpers
CHARACTER_ENCODING = 'utf-8'


class StrCodec(object):
    """Encode unicodes to strs and decode strs to unicodes.

    We will recursively work on arbitrarily nested structures consisting of
    str, unicode, list, tuple, dict and INode implementations mixed with
    others, which we won't touch. During that process a deep copy is produced
    leaving the original data structure intact.
    """

    def __init__(self, encoding=CHARACTER_ENCODING, soft=True):
        """
        :param encoding: the character encoding to decode from/encode to
        :param soft: if True, catch UnicodeDecodeErrors and leave these
            strings as-is.
        """
        self._encoding = encoding
        self._soft = soft

    def encode(self, arg):
        """Return an encoded copy of the argument

        - strs are decoded and reencode to make sure they conform to the
          encoding.

          XXX: makes no sense, especially because a UnicodeDecodeError ends
          up in a recursion error due to re-trying to encode. See below.
          Added condition to return if str is still str after decoding.
          This behavior should be removed completely.

        - unicodes are encoded as str according to encoding

        - lists/tuples/dicts are recursively worked on

        - everything else is left untouched
        """
        if isinstance(arg, (list, tuple)):
            # keep the container type (list stays list, tuple stays tuple)
            arg = arg.__class__(map(self.encode, arg))
        elif isinstance(arg, dict):
            # items are (key, value) tuples, handled by the tuple branch
            arg = dict([self.encode(t) for t in iteritems(arg)])
        elif isinstance(arg, bytes):
            arg = self.decode(arg)
            # If UnicodeDecodeError, binary data is expected. Return value
            # as is.
            if not isinstance(arg, bytes):
                arg = self.encode(arg)
        elif isinstance(arg, UNICODE_TYPE):
            arg = arg.encode(self._encoding)
        elif INode.providedBy(arg):
            # nodes are flattened into plain dicts of encoded items
            arg = dict([self.encode(t) for t in iteritems(arg)])
        return arg

    def decode(self, arg):
        """Return a decoded copy of the argument (bytes -> unicode),
        recursing into lists, tuples, dicts and INode implementations.
        """
        if isinstance(arg, (list, tuple)):
            # keep the container type (list stays list, tuple stays tuple)
            arg = arg.__class__(map(self.decode, arg))
        elif isinstance(arg, dict):
            arg = dict([self.decode(t) for t in iteritems(arg)])
        elif isinstance(arg, bytes):
            try:
                arg = arg.decode(self._encoding)
            except UnicodeDecodeError:
                # in soft mode we leave the string, otherwise we raise the
                # exception
                if not self._soft:
                    raise
        elif INode.providedBy(arg):
            # nodes are flattened into plain dicts of decoded items
            arg = dict([self.decode(t) for t in iteritems(arg)])
        return arg


# module-level default codec (utf-8, soft mode) and convenience aliases
strcodec = StrCodec()
encode = strcodec.encode
decode = strcodec.decode


def safe_encode(value, encoding=CHARACTER_ENCODING):
    """Encode value to bytes with encoding if value not already bytes."""
    if isinstance(value, UNICODE_TYPE):
        value = value.encode(encoding)
    return value


def safe_decode(value, encoding=CHARACTER_ENCODING):
    """Decode value to string with encoding if value not already string."""
    if not isinstance(value, UNICODE_TYPE):
        value = value.decode(encoding)
    return value

| def instance_property(func): 242 | """Decorator like ``property``, but underlying function is only called once 243 | per instance. 244 | 245 | Set instance attribute with '_' prefix. 246 | """ 247 | def wrapper(self): 248 | attribute_name = '_{}'.format(func.__name__) 249 | # do not use hasattr/getattr to avoid problems when overwriting 250 | # __getattr__ on a class which also uses instance_property 251 | try: 252 | return object.__getattribute__(self, attribute_name) 253 | except AttributeError: 254 | setattr(self, attribute_name, func(self)) 255 | return object.__getattribute__(self, attribute_name) 256 | wrapper.__doc__ = func.__doc__ 257 | return property(wrapper) 258 | 259 | 260 | def node_by_path(root, path): 261 | """Return node by path from root.""" 262 | if isinstance(path, STR_TYPE): 263 | path = path.strip('/') 264 | path = path.split('/') if path else [] 265 | if not path: 266 | return root 267 | node = root 268 | for name in path: 269 | node = node[name] 270 | return node 271 | 272 | 273 | def debug(func): 274 | """Decorator for logging debug messages.""" 275 | def wrapped(*args, **kws): 276 | logger.debug(u'{}: args={}, kws={}'.format( 277 | func_name(func), 278 | UNICODE_TYPE(args), 279 | UNICODE_TYPE(kws) 280 | )) 281 | f_result = func(*args, **kws) 282 | logger.debug(u'{}: --> {}'.format( 283 | func_name(func), 284 | UNICODE_TYPE(f_result) 285 | )) 286 | return f_result 287 | wrapped.__doc__ = func.__doc__ 288 | return wrapped 289 | --------------------------------------------------------------------------------