├── tests
├── __init__.py
├── mongoengine.png
├── fixtures.py
├── test_replicaset_connection.py
├── test_all_warnings.py
├── test_connection.py
├── test_django.py
├── test_signals.py
└── test_dynamic_document.py
├── requirements.txt
├── mongoengine
├── django
│ ├── __init__.py
│ ├── tests.py
│ ├── shortcuts.py
│ ├── sessions.py
│ ├── storage.py
│ └── auth.py
├── __init__.py
├── signals.py
├── python_support.py
├── tests.py
├── connection.py
├── dereference.py
└── document.py
├── docs
├── _themes
│ └── nature
│ │ ├── theme.conf
│ │ └── static
│ │ ├── pygments.css
│ │ └── nature.css_t
├── guide
│ ├── index.rst
│ ├── installing.rst
│ ├── signals.rst
│ ├── connecting.rst
│ ├── gridfs.rst
│ └── document-instances.rst
├── index.rst
├── code
│ └── tumblelog.py
├── apireference.rst
├── Makefile
├── django.rst
├── upgrade.rst
├── conf.py
├── tutorial.rst
└── changelog.rst
├── MANIFEST.in
├── .gitignore
├── setup.cfg
├── .travis.yml
├── LICENSE
├── python-mongoengine.spec
├── CONTRIBUTING.rst
├── AUTHORS
├── setup.py
├── README.rst
└── benchmark.py
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pymongo
--------------------------------------------------------------------------------
/mongoengine/django/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/mongoengine.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hmarr/mongoengine/HEAD/tests/mongoengine.png
--------------------------------------------------------------------------------
/docs/_themes/nature/theme.conf:
--------------------------------------------------------------------------------
1 | [theme]
2 | inherit = basic
3 | stylesheet = nature.css
4 | pygments_style = tango
5 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include MANIFEST.in
2 | include README.rst
3 | include LICENSE
4 | include AUTHORS
5 | recursive-include docs *
6 | prune docs/_build
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .*
2 | !.gitignore
3 | *~
4 | *.py[co]
5 | .*.sw[po]
6 | *.egg
7 | docs/.build
8 | docs/_build
9 | build/
10 | dist/
11 | mongoengine.egg-info/
12 | env/
13 | .settings
14 | .project
15 | .pydevproject
16 | tests/test_bugfix.py
17 | htmlcov/
--------------------------------------------------------------------------------
/docs/guide/index.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | User Guide
3 | ==========
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | installing
9 | connecting
10 | defining-documents
11 | document-instances
12 | querying
13 | gridfs
14 | signals
15 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [nosetests]
2 | verbosity = 3
3 | detailed-errors = 1
4 | #with-coverage = 1
5 | #cover-erase = 1
6 | #cover-html = 1
7 | #cover-html-dir = ../htmlcov
8 | #cover-package = mongoengine
9 | py3where = build
10 | where = tests
11 | #tests = test_bugfix.py
--------------------------------------------------------------------------------
/tests/fixtures.py:
--------------------------------------------------------------------------------
from datetime import datetime

from mongoengine import *


# Shared fixture documents used across the test suite.

class PickleEmbedded(EmbeddedDocument):
    # Default is the *callable* datetime.now, so the timestamp is taken at
    # instantiation time, not once at class-definition time.
    date = DateTimeField(default=datetime.now)


class PickleTest(Document):
    # Covers the common field types so pickle round-trips exercise each one.
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()


class Mixin(object):
    # Plain (non-Document) mixin carrying a field definition; used to test
    # mixing extra fields into Document subclasses.
    name = StringField()


class Base(Document):
    meta = {'allow_inheritance': True}
24 |
--------------------------------------------------------------------------------
/mongoengine/__init__.py:
--------------------------------------------------------------------------------
# Python 2 implicit-relative imports: pull each submodule's public API into
# the package namespace.
import document
from document import *
import fields
from fields import *
import connection
from connection import *
import queryset
from queryset import *
import signals
from signals import *

# Re-export exactly the union of what the submodules declare public.
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
           queryset.__all__ + signals.__all__)

# (major, minor, patch); the last item may instead be a string suffix such
# as 'rc1' -- handled by get_version() below.
VERSION = (0, 7, 5)


def get_version():
    """Return the version string, e.g. '0.7.5'.

    A string final element (Python 2 ``basestring``) is treated as a
    pre-release suffix and appended without a separating dot.
    """
    if isinstance(VERSION[-1], basestring):
        return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
    return '.'.join(map(str, VERSION))

__version__ = get_version()
24 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # http://travis-ci.org/#!/MongoEngine/mongoengine
2 | language: python
3 | services: mongodb
4 | python:
5 | - 2.5
6 | - 2.6
7 | - 2.7
8 | - 3.1
9 | - 3.2
10 | env:
11 | - PYMONGO=dev
12 | - PYMONGO=2.3
13 | - PYMONGO=2.2
14 | install:
15 | - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
16 | - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
17 | - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
18 | - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
19 | - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
20 | - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
21 | - python setup.py install
22 | script:
23 | - python setup.py test
24 | notifications:
25 | irc: "irc.freenode.org#mongoengine"
26 | branches:
27 | only:
28 | - master
29 | - 0.7
--------------------------------------------------------------------------------
/docs/guide/installing.rst:
--------------------------------------------------------------------------------
1 | ======================
2 | Installing MongoEngine
3 | ======================
4 |
5 | To use MongoEngine, you will need to download `MongoDB <http://www.mongodb.org/downloads>`_
6 | and ensure it is running in an accessible location. You will also need
7 | `PyMongo <http://api.mongodb.org/python/current/>`_ to use MongoEngine, but if you
8 | install MongoEngine using setuptools, then the dependencies will be handled for
9 | you.
10 |
11 | MongoEngine is available on PyPI, so to use it you can use :program:`pip`:
12 |
13 | .. code-block:: console
14 |
15 | $ pip install mongoengine
16 |
17 | Alternatively, if you don't have setuptools installed, `download it from PyPi
18 | <http://pypi.python.org/pypi/mongoengine/>`_ and run
19 |
20 | .. code-block:: console
21 |
22 | $ python setup.py install
23 |
24 | To use the bleeding-edge version of MongoEngine, you can get the source from
25 | `GitHub `_ and install it as above:
26 |
27 | .. code-block:: console
28 |
29 | $ git clone git://github.com/hmarr/mongoengine
30 | $ cd mongoengine
31 | $ python setup.py install
32 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2009-2012 See AUTHORS
2 |
3 | Permission is hereby granted, free of charge, to any person
4 | obtaining a copy of this software and associated documentation
5 | files (the "Software"), to deal in the Software without
6 | restriction, including without limitation the rights to use,
7 | copy, modify, merge, publish, distribute, sublicense, and/or sell
8 | copies of the Software, and to permit persons to whom the
9 | Software is furnished to do so, subject to the following
10 | conditions:
11 |
12 | The above copyright notice and this permission notice shall be
13 | included in all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 | OTHER DEALINGS IN THE SOFTWARE.
23 |
--------------------------------------------------------------------------------
/tests/test_replicaset_connection.py:
--------------------------------------------------------------------------------
import unittest

import pymongo
from pymongo import ReadPreference, ReplicaSetConnection

import mongoengine
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        # Reset the module-level connection registries so each test starts
        # from a clean slate.
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_replicaset_uri_passes_read_preference(self):
        """Requires a replica set called "rs" on port 27017
        """

        try:
            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError, e:
            # No replica set available in this environment; bail out quietly.
            return

        if not isinstance(conn, ReplicaSetConnection):
            # connect() fell back to a plain connection, so the read
            # preference assertion would be meaningless.
            return

        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)

if __name__ == '__main__':
    unittest.main()
34 |
--------------------------------------------------------------------------------
/mongoengine/django/tests.py:
--------------------------------------------------------------------------------
1 | #coding: utf-8
2 | from nose.plugins.skip import SkipTest
3 |
4 | from mongoengine.python_support import PY3
5 | from mongoengine import connect
6 |
7 | try:
8 | from django.test import TestCase
9 | from django.conf import settings
10 | except Exception as err:
11 | if PY3:
12 | from unittest import TestCase
13 | # Dummy value so no error
14 | class settings:
15 | MONGO_DATABASE_NAME = 'dummy'
16 | else:
17 | raise err
18 |
19 |
class MongoTestCase(TestCase):
    """TestCase that drops every collection in the test database between
    test runs.
    """
    # NOTE(review): in the original, this docstring sat as a dead bare-string
    # expression between methods and db_name was buried after setUp.

    # Derived from Django settings, prefixed with 'test_' so tests never
    # touch the real database.
    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def __init__(self, methodName='runTest'):
        # Bug fix: the original default was 'runtest' (lower-case t), which
        # does not match unittest's 'runTest' convention, so instantiating
        # the case without an explicit method name would fail.
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    def _post_teardown(self):
        super(MongoTestCase, self)._post_teardown()
        for collection in self.db.collection_names():
            # system.indexes must survive -- dropping it would destroy the
            # index definitions.
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
40 |
--------------------------------------------------------------------------------
/python-mongoengine.spec:
--------------------------------------------------------------------------------
1 | # sitelib for noarch packages, sitearch for others (remove the unneeded one)
2 | %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
3 | %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}
4 |
5 | %define srcname mongoengine
6 |
7 | Name: python-%{srcname}
8 | Version: 0.7.5
9 | Release: 1%{?dist}
10 | Summary: A Python Document-Object Mapper for working with MongoDB
11 |
12 | Group: Development/Libraries
13 | License: MIT
14 | URL: https://github.com/MongoEngine/mongoengine
15 | Source0: %{srcname}-%{version}.tar.bz2
16 |
17 | BuildRequires: python-devel
18 | BuildRequires: python-setuptools
19 |
20 | Requires: mongodb
21 | Requires: pymongo
22 | Requires: python-blinker
23 | Requires: python-imaging
24 |
25 |
26 | %description
27 | MongoEngine is an ORM-like layer on top of PyMongo.
28 |
29 | %prep
30 | %setup -q -n %{srcname}-%{version}
31 |
32 |
33 | %build
34 | # Remove CFLAGS=... for noarch packages (unneeded)
35 | CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build
36 |
37 |
38 | %install
39 | rm -rf $RPM_BUILD_ROOT
40 | %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT
41 |
42 | %clean
43 | rm -rf $RPM_BUILD_ROOT
44 |
45 | %files
46 | %defattr(-,root,root,-)
47 | %doc docs AUTHORS LICENSE README.rst
48 | # For noarch packages: sitelib
49 | %{python_sitelib}/*
50 | # For arch-specific packages: sitearch
51 | # %{python_sitearch}/*
52 |
53 | %changelog
54 | * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
--------------------------------------------------------------------------------
/mongoengine/django/shortcuts.py:
--------------------------------------------------------------------------------
1 | from mongoengine.queryset import QuerySet
2 | from mongoengine.base import BaseDocument
3 | from mongoengine.base import ValidationError
4 |
def _get_queryset(cls):
    """Normalise *cls* to a QuerySet: QuerySet instances pass through
    untouched, anything else yields its default ``objects`` manager.
    Inspired by django.shortcuts.*
    """
    return cls if isinstance(cls, QuerySet) else cls.objects
11 |
def get_document_or_404(cls, *args, **kwargs):
    """Return the single document matched by get(), or raise Http404 when
    no document exists.

    cls may be a Document or QuerySet object; all remaining positional and
    keyword arguments are forwarded to get().  As with get() itself, a
    MultipleObjectsReturned error propagates when more than one document
    matches.

    Inspired by django.shortcuts.*
    """
    qs = _get_queryset(cls)
    try:
        return qs.get(*args, **kwargs)
    except (qs._document.DoesNotExist, ValidationError):
        # Imported lazily so the module stays importable without Django.
        from django.http import Http404
        raise Http404('No %s matches the given query.' % qs._document._class_name)
31 |
def get_list_or_404(cls, *args, **kwargs):
    """Return the non-empty list of documents matched by filter(), or
    raise Http404 when nothing matches.

    cls may be a Document or QuerySet object; all remaining positional and
    keyword arguments are forwarded to filter().

    Inspired by django.shortcuts.*
    """
    qs = _get_queryset(cls)
    matches = list(qs.filter(*args, **kwargs))
    if matches:
        return matches
    # Imported lazily so the module stays importable without Django.
    from django.http import Http404
    raise Http404('No %s matches the given query.' % qs._document._class_name)
48 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | ==============================
2 | MongoEngine User Documentation
3 | ==============================
4 |
5 | **MongoEngine** is an Object-Document Mapper, written in Python for working with
6 | MongoDB. To install it, simply run
7 |
8 | .. code-block:: console
9 |
10 | # pip install -U mongoengine
11 |
12 | :doc:`tutorial`
13 | Start here for a quick overview.
14 |
15 | :doc:`guide/index`
16 | The Full guide to MongoEngine
17 |
18 | :doc:`apireference`
19 | The complete API documentation.
20 |
21 | :doc:`upgrade`
22 | How to upgrade MongoEngine.
23 |
24 | :doc:`django`
25 | Using MongoEngine and Django
26 |
27 | Community
28 | ---------
29 |
30 | To get help with using MongoEngine, use the `MongoEngine Users mailing list
31 | `_ or come chat on the
32 | `#mongoengine IRC channel `_.
33 |
34 | Contributing
35 | ------------
36 |
37 | The source is available on `GitHub `_ and
38 | contributions are always encouraged. Contributions can be as simple as
39 | minor tweaks to this documentation. To contribute, fork the project on
40 | `GitHub `_ and send a
41 | pull request.
42 |
43 | Also, you can join the developers' `mailing list
44 | `_.
45 |
46 | Changes
47 | -------
48 | See the :doc:`changelog` for a full list of changes to MongoEngine and
49 | :doc:`upgrade` for upgrade information.
50 |
51 | .. toctree::
52 | :hidden:
53 |
54 | tutorial
55 | guide/index
56 | apireference
57 | django
58 | changelog
59 | upgrade
60 |
61 | Indices and tables
62 | ------------------
63 |
64 | * :ref:`genindex`
65 | * :ref:`modindex`
66 | * :ref:`search`
67 |
68 |
--------------------------------------------------------------------------------
/mongoengine/signals.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Bug fix: pre_bulk_insert / post_bulk_insert are defined below and
# documented in docs/guide/signals.rst, but were missing from __all__, so
# star-imports silently dropped them.
__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
           'pre_delete', 'post_delete',
           'pre_bulk_insert', 'post_bulk_insert']

signals_available = False
try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    # blinker is optional: provide a stub Namespace whose signals treat
    # send() as a no-op but raise on connect()/disconnect(), so code that
    # merely *emits* signals keeps working without the dependency.
    class Namespace(object):
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else. Instead of doing anything on send, it
        will just ignore the arguments and do nothing instead.
        """

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')
        # send silently does nothing; every other operation fails loudly.
        send = lambda *a, **kw: None
        connect = disconnect = has_receivers_for = receivers_for = \
            temporarily_connected_to = _fail
        del _fail

# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')
47 |
--------------------------------------------------------------------------------
/docs/guide/signals.rst:
--------------------------------------------------------------------------------
1 | .. _signals:
2 |
3 | Signals
4 | =======
5 |
6 | .. versionadded:: 0.5
7 |
8 | .. note::
9 |
10 | Signal support is provided by the excellent `blinker`_ library and
11 | will gracefully fall back if it is not available.
12 |
13 |
14 | The following document signals exist in MongoEngine and are pretty self-explanatory:
15 |
16 | * `mongoengine.signals.pre_init`
17 | * `mongoengine.signals.post_init`
18 | * `mongoengine.signals.pre_save`
19 | * `mongoengine.signals.post_save`
20 | * `mongoengine.signals.pre_delete`
21 | * `mongoengine.signals.post_delete`
22 | * `mongoengine.signals.pre_bulk_insert`
23 | * `mongoengine.signals.post_bulk_insert`
24 |
25 | Example usage::
26 |
27 | from mongoengine import *
28 | from mongoengine import signals
29 |
30 | class Author(Document):
31 | name = StringField()
32 |
33 | def __unicode__(self):
34 | return self.name
35 |
36 | @classmethod
37 | def pre_save(cls, sender, document, **kwargs):
38 | logging.debug("Pre Save: %s" % document.name)
39 |
40 | @classmethod
41 | def post_save(cls, sender, document, **kwargs):
42 | logging.debug("Post Save: %s" % document.name)
43 | if 'created' in kwargs:
44 | if kwargs['created']:
45 | logging.debug("Created")
46 | else:
47 | logging.debug("Updated")
48 |
49 | signals.pre_save.connect(Author.pre_save, sender=Author)
50 | signals.post_save.connect(Author.post_save, sender=Author)
51 |
52 |
53 | ReferenceFields and signals
54 | ---------------------------
55 |
56 | Currently `reverse_delete_rules` do not trigger signals on the other part of
57 | the relationship. If this is required you must manually handle the
58 | reverse deletion.
59 |
60 | .. _blinker: http://pypi.python.org/pypi/blinker
61 |
--------------------------------------------------------------------------------
/docs/code/tumblelog.py:
--------------------------------------------------------------------------------
from mongoengine import *

# Connect to the local 'tumblelog' database before any documents are used.
connect('tumblelog')

class Comment(EmbeddedDocument):
    content = StringField()
    name = StringField(max_length=120)

class User(Document):
    email = StringField(required=True)
    first_name = StringField(max_length=50)
    last_name = StringField(max_length=50)

# Post is subclassed below by the concrete post types.
class Post(Document):
    title = StringField(max_length=120, required=True)
    author = ReferenceField(User)
    tags = ListField(StringField(max_length=30))
    comments = ListField(EmbeddedDocumentField(Comment))

class TextPost(Post):
    content = StringField()

class ImagePost(Post):
    image_path = StringField()

class LinkPost(Post):
    link_url = StringField()

# Start from an empty collection so the example output is predictable.
Post.drop_collection()

john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
john.save()

post1 = TextPost(title='Fun with MongoEngine', author=john)
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
post1.tags = ['mongodb', 'mongoengine']
post1.save()

post2 = LinkPost(title='MongoEngine Documentation', author=john)
post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
post2.tags = ['mongoengine']
post2.save()

# Python 2 print statements: list every post, specialising per subclass.
print 'ALL POSTS'
print
for post in Post.objects:
    print post.title
    print '=' * len(post.title)

    if isinstance(post, TextPost):
        print post.content

    if isinstance(post, LinkPost):
        print 'Link:', post.link_url

    print
    print

print 'POSTS TAGGED \'MONGODB\''
print
for post in Post.objects(tags='mongodb'):
    print post.title
print

num_posts = Post.objects(tags='mongodb').count()
print 'Found %d posts with tag "mongodb"' % num_posts
67 |
--------------------------------------------------------------------------------
/mongoengine/python_support.py:
--------------------------------------------------------------------------------
1 | """Helper functions and types to aid with Python 2.5 - 3 support."""
2 |
3 | import sys
4 |
5 | PY3 = sys.version_info[0] == 3
6 | PY25 = sys.version_info[:2] == (2, 5)
7 | UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
8 |
9 | if PY3:
10 | import codecs
11 | from io import BytesIO as StringIO
12 | # return s converted to binary. b('test') should be equivalent to b'test'
13 | def b(s):
14 | return codecs.latin_1_encode(s)[0]
15 |
16 | bin_type = bytes
17 | txt_type = str
18 | else:
19 | try:
20 | from cStringIO import StringIO
21 | except ImportError:
22 | from StringIO import StringIO
23 |
24 | # Conversion to binary only necessary in Python 3
25 | def b(s):
26 | return s
27 |
28 | bin_type = str
29 | txt_type = unicode
30 |
31 | str_types = (bin_type, txt_type)
32 |
33 | if PY25:
34 | def product(*args, **kwds):
35 | pools = map(tuple, args) * kwds.get('repeat', 1)
36 | result = [[]]
37 | for pool in pools:
38 | result = [x + [y] for x in result for y in pool]
39 | for prod in result:
40 | yield tuple(prod)
41 | reduce = reduce
42 | else:
43 | from itertools import product
44 | from functools import reduce
45 |
46 |
47 | # For use with Python 2.5
48 | # converts all keys from unicode to str for d and all nested dictionaries
49 | def to_str_keys_recursive(d):
50 | if isinstance(d, list):
51 | for val in d:
52 | if isinstance(val, (dict, list)):
53 | to_str_keys_recursive(val)
54 | elif isinstance(d, dict):
55 | for key, val in d.items():
56 | if isinstance(val, (dict, list)):
57 | to_str_keys_recursive(val)
58 | if isinstance(key, unicode):
59 | d[str(key)] = d.pop(key)
60 | else:
61 | raise ValueError("non list/dict parameter not allowed")
62 |
--------------------------------------------------------------------------------
/mongoengine/tests.py:
--------------------------------------------------------------------------------
from mongoengine.connection import get_db


class query_counter(object):
    """ Query_counter contextmanager to get the number of queries.

    Works by enabling MongoDB profiling (level 2 logs every operation to
    system.profile) for the duration of the ``with`` block, then counting
    the profile entries.  Supports comparison against plain ints.
    """

    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        # Profiling must be switched off before system.profile can be
        # dropped; level 2 then records all subsequent operations.
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        count = self.db.system.profile.find().count() - self.counter
        # NOTE(review): the offset grows by one per call, apparently to
        # discount the profiled query that this count itself issues --
        # confirm against the profiler's behaviour.
        self.counter += 1
        return count
60 |
--------------------------------------------------------------------------------
/docs/apireference.rst:
--------------------------------------------------------------------------------
1 | =============
2 | API Reference
3 | =============
4 |
5 | Connecting
6 | ==========
7 |
8 | .. autofunction:: mongoengine.connect
9 | .. autofunction:: mongoengine.register_connection
10 |
11 | Documents
12 | =========
13 |
14 | .. autoclass:: mongoengine.Document
15 | :members:
16 |
17 | .. attribute:: objects
18 |
19 | A :class:`~mongoengine.queryset.QuerySet` object that is created lazily
20 | on access.
21 |
22 | .. autoclass:: mongoengine.EmbeddedDocument
23 | :members:
24 |
25 | .. autoclass:: mongoengine.DynamicDocument
26 | :members:
27 |
28 | .. autoclass:: mongoengine.DynamicEmbeddedDocument
29 | :members:
30 |
31 | .. autoclass:: mongoengine.document.MapReduceDocument
32 | :members:
33 |
34 | .. autoclass:: mongoengine.ValidationError
35 | :members:
36 |
37 | Querying
38 | ========
39 |
40 | .. autoclass:: mongoengine.queryset.QuerySet
41 | :members:
42 |
43 | .. automethod:: mongoengine.queryset.QuerySet.__call__
44 |
45 | .. autofunction:: mongoengine.queryset.queryset_manager
46 |
47 | Fields
48 | ======
49 |
50 | .. autoclass:: mongoengine.BinaryField
51 | .. autoclass:: mongoengine.BooleanField
52 | .. autoclass:: mongoengine.ComplexDateTimeField
53 | .. autoclass:: mongoengine.DateTimeField
54 | .. autoclass:: mongoengine.DecimalField
55 | .. autoclass:: mongoengine.DictField
56 | .. autoclass:: mongoengine.DynamicField
57 | .. autoclass:: mongoengine.EmailField
58 | .. autoclass:: mongoengine.EmbeddedDocumentField
59 | .. autoclass:: mongoengine.FileField
60 | .. autoclass:: mongoengine.FloatField
61 | .. autoclass:: mongoengine.GenericEmbeddedDocumentField
62 | .. autoclass:: mongoengine.GenericReferenceField
63 | .. autoclass:: mongoengine.GeoPointField
64 | .. autoclass:: mongoengine.ImageField
65 | .. autoclass:: mongoengine.IntField
66 | .. autoclass:: mongoengine.ListField
67 | .. autoclass:: mongoengine.MapField
68 | .. autoclass:: mongoengine.ObjectIdField
69 | .. autoclass:: mongoengine.ReferenceField
70 | .. autoclass:: mongoengine.SequenceField
71 | .. autoclass:: mongoengine.SortedListField
72 | .. autoclass:: mongoengine.StringField
73 | .. autoclass:: mongoengine.URLField
74 | .. autoclass:: mongoengine.UUIDField
75 |
--------------------------------------------------------------------------------
/docs/guide/connecting.rst:
--------------------------------------------------------------------------------
1 | .. _guide-connecting:
2 |
3 | =====================
4 | Connecting to MongoDB
5 | =====================
6 |
7 | To connect to a running instance of :program:`mongod`, use the
8 | :func:`~mongoengine.connect` function. The first argument is the name of the
9 | database to connect to. If the database does not exist, it will be created. If
10 | the database requires authentication, :attr:`username` and :attr:`password`
11 | arguments may be provided::
12 |
13 | from mongoengine import connect
14 | connect('project1', username='webapp', password='pwd123')
15 |
16 | By default, MongoEngine assumes that the :program:`mongod` instance is running
17 | on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
18 | provide :attr:`host` and :attr:`port` arguments to
19 | :func:`~mongoengine.connect`::
20 |
21 | connect('project1', host='192.168.1.35', port=12345)
22 |
23 | Uri style connections are also supported as long as you include the database
24 | name - just supply the uri as the :attr:`host` to
25 | :func:`~mongoengine.connect`::
26 |
27 | connect('project1', host='mongodb://localhost/database_name')
28 |
29 | ReplicaSets
30 | ===========
31 |
32 | MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
33 | to use them please use a URI style connection and provide the `replicaSet` name in the
34 | connection kwargs.
35 |
36 | Multiple Databases
37 | ==================
38 |
39 | Multiple database support was added in MongoEngine 0.6. To use multiple
40 | databases you can use :func:`~mongoengine.connect` and provide an `alias` name
41 | for the connection - if no `alias` is provided then "default" is used.
42 |
43 | In the background this uses :func:`~mongoengine.register_connection` to
44 | store the data and you can register all aliases up front if required.
45 |
46 | Individual documents can also support multiple databases by providing a
47 | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
48 | to point across databases and collections. Below is an example schema, using
49 | 3 different databases to store data::
50 |
51 | class User(Document):
52 | name = StringField()
53 |
54 | meta = {"db_alias": "user-db"}
55 |
56 | class Book(Document):
57 | name = StringField()
58 |
59 | meta = {"db_alias": "book-db"}
60 |
61 | class AuthorBooks(Document):
62 | author = ReferenceField(User)
63 | book = ReferenceField(Book)
64 |
65 | meta = {"db_alias": "users-books-db"}
66 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing to MongoEngine
2 | ===========================
3 |
4 | MongoEngine has a large `community
5 | `_ and
6 | contributions are always encouraged. Contributions can be as simple as
7 | minor tweaks to the documentation. Please read these guidelines before
8 | sending a pull request.
9 |
10 | Bugfixes and New Features
11 | -------------------------
12 |
Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
16 | issue or feature request. That way you avoid working on something
17 | that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>`_.
19 |
20 | Supported Interpreters
21 | ----------------------
22 |
MongoEngine supports CPython 2.5 and newer. Language
24 | features not supported by all interpreters can not be used.
25 | Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
27 |
28 | Style Guide
29 | -----------
30 |
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
32 | including 4 space indents and 79 character line limits.
33 |
34 | Testing
35 | -------
36 |
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
38 | and any pull requests are automatically tested by Travis. Any pull requests
39 | without tests will take longer to be integrated and might be refused.
40 |
41 | General Guidelines
42 | ------------------
43 |
44 | - Avoid backward breaking changes if at all possible.
45 | - Write inline documentation for new classes and methods.
46 | - Write tests and make sure they pass (make sure you have a mongod
47 | running on the default port, then execute ``python setup.py test``
48 | from the cmd line to run the test suite).
49 | - Add yourself to AUTHORS.rst :)
50 |
51 | Documentation
52 | -------------
53 |
54 | To contribute to the `API documentation
55 | `_
56 | just make your changes to the inline documentation of the appropriate
57 | `source code `_ or `rst file
58 | `_ in a
59 | branch and submit a `pull request `_.
60 | You might also use the github `Edit `_
61 | button.
62 |
--------------------------------------------------------------------------------
/mongoengine/django/sessions.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from django.conf import settings
4 | from django.contrib.sessions.backends.base import SessionBase, CreateError
5 | from django.core.exceptions import SuspiciousOperation
6 | from django.utils.encoding import force_unicode
7 |
8 | from mongoengine.document import Document
9 | from mongoengine import fields
10 | from mongoengine.queryset import OperationError
11 | from mongoengine.connection import DEFAULT_CONNECTION_NAME
12 |
13 |
# Alias of the MongoEngine connection used to store session documents.
# Configurable via the Django setting MONGOENGINE_SESSION_DB_ALIAS;
# falls back to mongoengine's default connection alias.
MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)
17 |
18 |
class MongoSession(Document):
    """MongoDB document holding a single Django session."""

    # Primary key; max_length mirrors Django's 40-char session_key column.
    session_key = fields.StringField(primary_key=True, max_length=40)
    # Encoded payload produced by SessionBase.encode() in SessionStore.save().
    session_data = fields.StringField()
    # Sessions past this date are ignored by SessionStore.load().
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}
27 |
28 |
class SessionStore(SessionBase):
    """Django session store backed by MongoDB via :class:`MongoSession`."""

    def load(self):
        """Fetch and decode the current session's data.

        When the key is missing/expired, or decoding raises
        ``SuspiciousOperation``, a brand-new session is created and an
        empty dict is returned.
        """
        try:
            stored = MongoSession.objects(session_key=self.session_key,
                                          expire_date__gt=datetime.now())[0]
            return self.decode(force_unicode(stored.session_data))
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        """Return True if a session document with this key exists."""
        match = MongoSession.objects(session_key=session_key).first()
        return bool(match)

    def create(self):
        """Generate a fresh session key, retrying on key collisions."""
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collision -- loop around and try another key.
                continue
            break
        self.modified = True
        self._session_cache = {}

    def save(self, must_create=False):
        """Persist the session document.

        With ``must_create`` the write is an insert, and a duplicate key
        surfaces as ``CreateError`` (as the Django contract requires).
        """
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        record = MongoSession(
            session_key=self.session_key,
            session_data=self.encode(self._get_session(no_load=must_create)),
            expire_date=self.get_expiry_date())
        try:
            record.save(force_insert=must_create, safe=True)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        """Remove the document for *session_key* (defaults to own key)."""
        key = session_key if session_key is not None else self.session_key
        if key is None:
            return
        MongoSession.objects(session_key=key).delete()
75 |
--------------------------------------------------------------------------------
/docs/guide/gridfs.rst:
--------------------------------------------------------------------------------
1 | ======
2 | GridFS
3 | ======
4 |
5 | .. versionadded:: 0.4
6 |
7 | Writing
8 | -------
9 |
10 | GridFS support comes in the form of the :class:`~mongoengine.FileField` field
11 | object. This field acts as a file-like object and provides a couple of
12 | different ways of inserting and retrieving data. Arbitrary metadata such as
13 | content type can also be stored alongside the files. In the following example,
14 | a document is created to store details about animals, including a photo::
15 |
16 | class Animal(Document):
17 | genus = StringField()
18 | family = StringField()
19 | photo = FileField()
20 |
    marmot = Animal(genus='Marmota', family='Sciuridae')

    marmot_photo = open('marmot.jpg', 'rb')  # Retrieve a photo from disk
    marmot.photo = marmot_photo              # Store photo in the document
25 | marmot.photo.content_type = 'image/jpeg' # Store metadata
26 |
27 | marmot.save()
28 |
29 | Another way of writing to a :class:`~mongoengine.FileField` is to use the
30 | :func:`put` method. This allows for metadata to be stored in the same call as
31 | the file::
32 |
33 | marmot.photo.put(marmot_photo, content_type='image/jpeg')
34 |
35 | marmot.save()
36 |
37 | Retrieval
38 | ---------
39 |
40 | So using the :class:`~mongoengine.FileField` is just like using any other
41 | field. The file can also be retrieved just as easily::
42 |
43 | marmot = Animal.objects(genus='Marmota').first()
44 | photo = marmot.photo.read()
45 | content_type = marmot.photo.content_type
46 |
47 | Streaming
48 | ---------
49 |
50 | Streaming data into a :class:`~mongoengine.FileField` is achieved in a
51 | slightly different manner. First, a new file must be created by calling the
52 | :func:`new_file` method. Data can then be written using :func:`write`::
53 |
54 | marmot.photo.new_file()
55 | marmot.photo.write('some_image_data')
56 | marmot.photo.write('some_more_image_data')
57 | marmot.photo.close()
58 |
    marmot.save()
60 |
61 | Deletion
62 | --------
63 |
64 | Deleting stored files is achieved with the :func:`delete` method::
65 |
66 | marmot.photo.delete()
67 |
68 | .. warning::
69 |
70 | The FileField in a Document actually only stores the ID of a file in a
71 | separate GridFS collection. This means that deleting a document
72 | with a defined FileField does not actually delete the file. You must be
73 | careful to delete any files in a Document as above before deleting the
74 | Document itself.
75 |
76 |
77 | Replacing files
78 | ---------------
79 |
80 | Files can be replaced with the :func:`replace` method. This works just like
81 | the :func:`put` method so even metadata can (and should) be replaced::
82 |
    another_marmot = open('another_marmot.png', 'rb')
84 | marmot.photo.replace(another_marmot, content_type='image/png')
85 |
--------------------------------------------------------------------------------
/docs/_themes/nature/static/pygments.css:
--------------------------------------------------------------------------------
/* Pygments syntax-highlighting palette for the "nature" Sphinx theme.
   One rule per Pygments short token class (the trailing comment on each
   line names the token the class abbreviates). */
.c { color: #999988; font-style: italic } /* Comment */
.k { font-weight: bold } /* Keyword */
.o { font-weight: bold } /* Operator */
.cm { color: #999988; font-style: italic } /* Comment.Multiline */
.cp { color: #999999; font-weight: bold } /* Comment.preproc */
.c1 { color: #999988; font-style: italic } /* Comment.Single */
.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
.ge { font-style: italic } /* Generic.Emph */
.gr { color: #aa0000 } /* Generic.Error */
.gh { color: #999999 } /* Generic.Heading */
.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
.go { color: #111 } /* Generic.Output */
.gp { color: #555555 } /* Generic.Prompt */
.gs { font-weight: bold } /* Generic.Strong */
.gu { color: #aaaaaa } /* Generic.Subheading */
.gt { color: #aa0000 } /* Generic.Traceback */
.kc { font-weight: bold } /* Keyword.Constant */
.kd { font-weight: bold } /* Keyword.Declaration */
.kp { font-weight: bold } /* Keyword.Pseudo */
.kr { font-weight: bold } /* Keyword.Reserved */
.kt { color: #445588; font-weight: bold } /* Keyword.Type */
.m { color: #009999 } /* Literal.Number */
.s { color: #bb8844 } /* Literal.String */
.na { color: #008080 } /* Name.Attribute */
.nb { color: #999999 } /* Name.Builtin */
.nc { color: #445588; font-weight: bold } /* Name.Class */
.no { color: #ff99ff } /* Name.Constant */
.ni { color: #800080 } /* Name.Entity */
.ne { color: #990000; font-weight: bold } /* Name.Exception */
.nf { color: #990000; font-weight: bold } /* Name.Function */
.nn { color: #555555 } /* Name.Namespace */
.nt { color: #000080 } /* Name.Tag */
.nv { color: purple } /* Name.Variable */
.ow { font-weight: bold } /* Operator.Word */
.mf { color: #009999 } /* Literal.Number.Float */
.mh { color: #009999 } /* Literal.Number.Hex */
.mi { color: #009999 } /* Literal.Number.Integer */
.mo { color: #009999 } /* Literal.Number.Oct */
.sb { color: #bb8844 } /* Literal.String.Backtick */
.sc { color: #bb8844 } /* Literal.String.Char */
.sd { color: #bb8844 } /* Literal.String.Doc */
.s2 { color: #bb8844 } /* Literal.String.Double */
.se { color: #bb8844 } /* Literal.String.Escape */
.sh { color: #bb8844 } /* Literal.String.Heredoc */
.si { color: #bb8844 } /* Literal.String.Interpol */
.sx { color: #bb8844 } /* Literal.String.Other */
.sr { color: #808000 } /* Literal.String.Regex */
.s1 { color: #bb8844 } /* Literal.String.Single */
.ss { color: #bb8844 } /* Literal.String.Symbol */
.bp { color: #999999 } /* Name.Builtin.Pseudo */
.vc { color: #ff99ff } /* Name.Variable.Class */
.vg { color: #ff99ff } /* Name.Variable.Global */
.vi { color: #ff99ff } /* Name.Variable.Instance */
.il { color: #009999 } /* Literal.Number.Integer.Long */
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | The PRIMARY AUTHORS are (and/or have been):
2 |
3 | Ross Lawley
4 | Harry Marr
5 | Matt Dennewitz
6 | Deepak Thukral
7 | Florian Schlachter
8 | Steve Challis
9 | Wilson Júnior
10 | Dan Crosta https://github.com/dcrosta
11 | Laine Herron https://github.com/LaineHerron
12 |
13 | CONTRIBUTORS
14 |
Derived from the git logs, inevitably incomplete but all of whom and others
16 | have submitted patches, reported bugs and generally helped make MongoEngine
17 | that much better:
18 |
19 | * Harry Marr
20 | * Ross Lawley
21 | * blackbrrr
22 | * Florian Schlachter
23 | * Vincent Driessen
24 | * Steve Challis
25 | * flosch
26 | * Deepak Thukral
27 | * Colin Howe
28 | * Wilson Júnior
29 | * Alistair Roche
30 | * Dan Crosta
31 | * Viktor Kerkez
32 | * Stephan Jaekel
33 | * Rached Ben Mustapha
34 | * Greg Turner
35 | * Daniel Hasselrot
36 | * Mircea Pasoi
37 | * Matt Chisholm
38 | * James Punteney
39 | * TimothéePeignier
40 | * Stuart Rackham
41 | * Serge Matveenko
42 | * Matt Dennewitz
43 | * Don Spaulding
44 | * Ales Zoulek
45 | * sshwsfc
46 | * sib
47 | * Samuel Clay
48 | * Nick Vlku
49 | * martin
50 | * Flavio Amieiro
51 | * Анхбаяр Лхагвадорж
52 | * Zak Johnson
53 | * Victor Farazdagi
54 | * vandersonmota
55 | * Theo Julienne
56 | * sp
57 | * Slavi Pantaleev
58 | * Richard Henry
59 | * Nicolas Perriault
60 | * Nick Vlku Jr
61 | * Michael Henson
62 | * Leo Honkanen
63 | * kuno
64 | * Josh Ourisman
65 | * Jaime
66 | * Igor Ivanov
67 | * Gregg Lind
68 | * Gareth Lloyd
69 | * Albert Choi
70 | * John Arnfield
71 | * grubberr
72 | * Paul Aliagas
73 | * Paul Cunnane
74 | * Julien Rebetez
75 | * Marc Tamlyn
76 | * Karim Allah
77 | * Adam Parrish
78 | * jpfarias
79 | * jonrscott
80 | * Alice Zoë Bevan-McGregor
81 | * Stephen Young
82 | * tkloc
83 | * aid
84 | * yamaneko1212
85 | * dave mankoff
86 | * Alexander G. Morano
87 | * jwilder
88 | * Joe Shaw
89 | * Adam Flynn
90 | * Ankhbayar
91 | * Jan Schrewe
92 | * David Koblas
93 | * Crittercism
94 | * Alvin Liang
95 | * andrewmlevy
96 | * Chris Faulkner
97 | * Ashwin Purohit
98 | * Shalabh Aggarwal
99 | * Chris Williams
100 | * Robert Kajic
101 | * Jacob Peddicord
102 | * Nils Hasenbanck
103 | * mostlystatic
104 | * Greg Banks
105 | * swashbuckler
106 | * Adam Reeve
107 | * Anthony Nemitz
108 | * deignacio
109 | * shaunduncan
110 | * Meir Kriheli
111 | * Andrey Fedoseev
112 | * aparajita
113 | * Tristan Escalada
114 | * Alexander Koshelev
115 | * Jaime Irurzun
116 | * Alexandre González
117 | * Thomas Steinacher
118 | * Tommi Komulainen
119 | * Peter Landry
120 | * biszkoptwielki
121 | * Anton Kolechkin
122 | * Sergey Nikitin
123 | * psychogenic
124 | * Stefan Wójcik
125 | * dimonb
126 | * Garry Polley
127 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from setuptools import setup, find_packages
4 |
# Hack to silence atexit traceback in newer python versions
try:
    import multiprocessing
except ImportError:
    pass

DESCRIPTION = """MongoEngine is a Python Object-Document
Mapper for working with MongoDB."""

# Use the README as the long description when it is readable; a missing or
# unreadable README (e.g. in some packaging environments) is not fatal.
LONG_DESCRIPTION = None
try:
    LONG_DESCRIPTION = open('README.rst').read()
except Exception:
    # Narrowed from a bare ``except:``, which would also swallow
    # SystemExit and KeyboardInterrupt.
    pass
18 |
19 |
def get_version(version_tuple):
    """Render a VERSION tuple as a version string.

    An all-integer tuple joins with dots, e.g. ``(0, 6, 1)`` -> ``'0.6.1'``;
    a trailing string (pre-release tag) is appended verbatim to the joined
    numeric prefix, e.g. ``(0, 6, 'rc1')`` -> ``'0.6rc1'``.
    """
    if isinstance(version_tuple[-1], int):
        return '.'.join(str(part) for part in version_tuple)
    return '.'.join(str(part) for part in version_tuple[:-1]) + version_tuple[-1]
24 |
# Dirty hack to get version number from monogengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]

# NOTE(review): eval() of the right-hand side of the VERSION line; acceptable
# only because the input is this project's own source tree.
VERSION = get_version(eval(version_line.split('=')[-1]))
print(VERSION)

# Trove classifiers advertised on PyPI.
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.5",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.1",
    "Programming Language :: Python :: 3.2",
    "Programming Language :: Python :: Implementation :: CPython",
    'Topic :: Database',
    'Topic :: Software Development :: Libraries :: Python Modules',
]

# setup() options that differ between the Python 2 and Python 3 builds.
extra_opts = {}
if sys.version_info[0] == 3:
    # Python 3 support is produced by running 2to3 at build time.
    extra_opts['use_2to3'] = True
    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
    extra_opts['packages'] = find_packages(exclude=('tests',))
    if "test" in sys.argv or "nosetests" in sys.argv:
        # Ship the tests package (and its image fixture) only when testing.
        extra_opts['packages'].append("tests")
        extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
else:
    # Django and PIL are only needed by the test suite on Python 2.
    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
    extra_opts['packages'] = find_packages(exclude=('tests',))

setup(name='mongoengine',
      version=VERSION,
      author='Harry Marr',
      author_email='harry.marr@{nospam}gmail.com',
      maintainer="Ross Lawley",
      maintainer_email="ross.lawley@{nospam}gmail.com",
      url='http://mongoengine.org/',
      download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
      license='MIT',
      include_package_data=True,
      description=DESCRIPTION,
      long_description=LONG_DESCRIPTION,
      platforms=['any'],
      classifiers=CLASSIFIERS,
      install_requires=['pymongo'],
      test_suite='nose.collector',
      **extra_opts
)
82 |
--------------------------------------------------------------------------------
/tests/test_all_warnings.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import warnings
3 |
4 | from mongoengine import *
5 | from mongoengine.tests import query_counter
6 |
7 |
class TestWarnings(unittest.TestCase):
    """Checks the warnings MongoEngine emits ahead of behaviour changes.

    ``warnings.showwarning`` is replaced with a collector for the duration
    of each test, so raised warnings can be inspected via
    ``self.warning_list``.
    """

    def setUp(self):
        # The return value is not needed; connecting registers the default
        # connection used by the documents defined inside the tests.
        # (Previously bound to an unused local ``conn``.)
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        # Drop-in replacement for warnings.showwarning; records each warning.
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_allow_inheritance_future_warning(self):
        """Add FutureWarning for future allow_inheritance default change.
        """

        class SimpleBase(Document):
            a = IntField()

        class InheritedClass(SimpleBase):
            b = IntField()

        InheritedClass()
        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("InheritedClass" in str(warning["message"]))

    def test_dbref_reference_field_future_warning(self):
        """Saving a ReferenceField warns that storage will switch to ObjectId."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person()
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save(cascade=False)

        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("ReferenceFields will default to using ObjectId"
                        in str(warning["message"]))

    def test_document_save_cascade_future_warning(self):
        """An implicit cascading save warns that cascade defaults to off in 0.8."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.parent.name = "Poppa Wilson"
        p2.save()

        self.assertEqual(len(self.warning_list), 1)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("Cascading saves will default to off in 0.8"
                        in str(warning["message"]))

    def test_document_collection_syntax_warning(self):
        """Setting `collection` on a subclass of a concrete Document warns."""

        class NonAbstractBase(Document):
            pass

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
97 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest

# NOTE(review): the <target> placeholders below had been stripped from the
# help text (likely by an HTML-unescaping pass); restored here.
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html      to make standalone HTML files"
	@echo "  dirhtml   to make HTML files named index.html in directories"
	@echo "  pickle    to make pickle files"
	@echo "  json      to make JSON files"
	@echo "  htmlhelp  to make HTML files and a HTML help project"
	@echo "  qthelp    to make HTML files and a qthelp project"
	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  changes   to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck to check all external links for integrity"
	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MongoEngine.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MongoEngine.qhc"

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
	      "run these through (pdf)latex."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
90 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ===========
2 | MongoEngine
3 | ===========
4 | :Info: MongoEngine is an ORM-like layer on top of PyMongo.
5 | :Repository: https://github.com/MongoEngine/mongoengine
6 | :Author: Harry Marr (http://github.com/hmarr)
7 | :Maintainer: Ross Lawley (http://github.com/rozza)
8 |
9 | .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
10 | :target: http://travis-ci.org/MongoEngine/mongoengine
11 |
12 | About
13 | =====
14 | MongoEngine is a Python Object-Document Mapper for working with MongoDB.
15 | Documentation available at http://mongoengine-odm.rtfd.org - there is currently
16 | a `tutorial `_, a `user guide
17 | `_ and an `API reference
18 | `_.
19 |
20 | Installation
21 | ============
22 | If you have `setuptools `_
23 | you can use ``easy_install -U mongoengine``. Otherwise, you can download the
24 | source from `GitHub `_ and run ``python
25 | setup.py install``.
26 |
27 | Dependencies
28 | ============
29 | - pymongo 2.1.1+
30 | - sphinx (optional - for documentation generation)
31 |
32 | Examples
33 | ========
34 | Some simple examples of what MongoEngine code looks like::
35 |
36 | class BlogPost(Document):
37 | title = StringField(required=True, max_length=200)
38 | posted = DateTimeField(default=datetime.datetime.now)
39 | tags = ListField(StringField(max_length=50))
40 |
41 | class TextPost(BlogPost):
42 | content = StringField(required=True)
43 |
44 | class LinkPost(BlogPost):
45 | url = StringField(required=True)
46 |
47 | # Create a text-based post
48 | >>> post1 = TextPost(title='Using MongoEngine', content='See the tutorial')
49 | >>> post1.tags = ['mongodb', 'mongoengine']
50 | >>> post1.save()
51 |
52 | # Create a link-based post
53 | >>> post2 = LinkPost(title='MongoEngine Docs', url='hmarr.com/mongoengine')
54 | >>> post2.tags = ['mongoengine', 'documentation']
55 | >>> post2.save()
56 |
57 | # Iterate over all posts using the BlogPost superclass
58 | >>> for post in BlogPost.objects:
59 | ... print '===', post.title, '==='
60 | ... if isinstance(post, TextPost):
61 | ... print post.content
62 | ... elif isinstance(post, LinkPost):
63 | ... print 'Link:', post.url
64 | ... print
65 | ...
66 |
67 | >>> len(BlogPost.objects)
68 | 2
    >>> len(TextPost.objects)
70 | 1
71 | >>> len(LinkPost.objects)
72 | 1
73 |
74 | # Find tagged posts
75 | >>> len(BlogPost.objects(tags='mongoengine'))
76 | 2
77 | >>> len(BlogPost.objects(tags='mongodb'))
78 | 1
79 |
80 | Tests
81 | =====
82 | To run the test suite, ensure you are running a local instance of MongoDB on
83 | the standard port, and run: ``python setup.py test``.
84 |
85 | Community
86 | =========
87 | - `MongoEngine Users mailing list
88 | `_
89 | - `MongoEngine Developers mailing list
90 | `_
91 | - `#mongoengine IRC channel `_
92 |
93 | Contributing
94 | ============
We welcome contributions! See the `contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
96 |
--------------------------------------------------------------------------------
/tests/test_connection.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import pymongo
3 | import unittest
4 |
5 | import mongoengine.connection
6 |
7 | from bson.tz_util import utc
8 |
9 | from mongoengine import *
10 | from mongoengine.connection import get_db, get_connection, ConnectionError
11 |
12 |
class ConnectionTest(unittest.TestCase):
    """Integration tests for connect()/register_connection().

    NOTE(review): these assume a MongoDB server is listening on the
    default local port.
    """

    def tearDown(self):
        # Reset mongoengine's module-level registries so each test
        # starts with a clean connection state.
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        # A second connect() under an alias registers independently.
        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with uri's
        """
        # Seed users on the admin and test databases so the authenticated
        # URI below can succeed.
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        # Bad credentials must surface as a ConnectionError.
        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        # The database name comes from the URI path, not the positional arg.
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        # No default alias was registered, so the bare lookup must fail.
        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo.
        """
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')

        self.assertTrue(conn.tz_aware)

        # Without the kwarg the connection defaults to naive datetimes.
        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        self.assertFalse(conn.tz_aware)

    def test_datetime(self):
        # With tz_aware=True an aware datetime should round-trip intact.
        connect('mongoenginetest', tz_aware=True)
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)

        class DateDoc(Document):
            the_date = DateTimeField(required=True)

        DateDoc.drop_collection()
        DateDoc(the_date=d).save()

        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)
95 |
96 |
# Allow running this module directly as a script.
if __name__ == '__main__':
    unittest.main()
99 |
--------------------------------------------------------------------------------
/docs/django.rst:
--------------------------------------------------------------------------------
1 | =============================
2 | Using MongoEngine with Django
3 | =============================
4 |
5 | .. note :: Updated to support Django 1.4
6 |
7 | Connecting
8 | ==========
9 | In your **settings.py** file, ignore the standard database settings (unless you
10 | also plan to use the ORM in your project), and instead call
11 | :func:`~mongoengine.connect` somewhere in the settings module.
12 |
13 | Authentication
14 | ==============
15 | MongoEngine includes a Django authentication backend, which uses MongoDB. The
16 | :class:`~mongoengine.django.auth.User` model is a MongoEngine
17 | :class:`~mongoengine.Document`, but implements most of the methods and
18 | attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
23 | file::
24 |
25 | AUTHENTICATION_BACKENDS = (
26 | 'mongoengine.django.auth.MongoEngineBackend',
27 | )
28 |
29 | The :mod:`~mongoengine.django.auth` module also contains a
30 | :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
31 | :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
32 |
33 | .. versionadded:: 0.1.3
34 |
35 | Sessions
36 | ========
37 | Django allows the use of different backend stores for its sessions. MongoEngine
provides a MongoDB-based session backend for Django, which allows you to use
sessions in your Django application with just MongoDB. To enable the MongoEngine
40 | session backend, ensure that your settings module has
41 | ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
42 | ``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
43 | ``INSTALLED_APPS``. From there, all you need to do is add the following line
into your settings module::
45 |
46 | SESSION_ENGINE = 'mongoengine.django.sessions'
47 |
48 | .. versionadded:: 0.2.1
49 |
50 | Storage
51 | =======
52 | With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
53 | it is useful to have a Django file storage backend that wraps this. The new
54 | storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
55 | Using it is very similar to using the default FileSystemStorage.::
56 |
57 | from mongoengine.django.storage import GridFSStorage
58 | fs = GridFSStorage()
59 |
60 | filename = fs.save('hello.txt', 'Hello, World!')
61 |
All of the `Django Storage API methods
<https://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
implemented except :func:`path`. If the filename provided already exists, an
underscore and a number (before the file extension, if one exists) will be
66 | appended to the filename until the generated filename doesn't exist. The
67 | :func:`save` method will return the new filename.::
68 |
69 | >>> fs.exists('hello.txt')
70 | True
71 | >>> fs.open('hello.txt').read()
72 | 'Hello, World!'
73 | >>> fs.size('hello.txt')
74 | 13
75 | >>> fs.url('hello.txt')
76 | 'http://your_media_url/hello.txt'
77 | >>> fs.open('hello.txt').name
78 | 'hello.txt'
79 | >>> fs.listdir()
80 | ([], [u'hello.txt'])
81 |
All files will be saved and retrieved in GridFS via the :class:`FileDocument`
83 | document, allowing easy access to the files without the GridFSStorage
84 | backend.::
85 |
86 | >>> from mongoengine.django.storage import FileDocument
87 | >>> FileDocument.objects()
88 | []
89 |
90 | .. versionadded:: 0.4
91 |
--------------------------------------------------------------------------------
/docs/guide/document-instances.rst:
--------------------------------------------------------------------------------
1 | ===================
Document instances
3 | ===================
4 | To create a new document object, create an instance of the relevant document
5 | class, providing values for its fields as its constructor keyword arguments.
6 | You may provide values for any of the fields on the document::
7 |
8 | >>> page = Page(title="Test Page")
9 | >>> page.title
10 | 'Test Page'
11 |
12 | You may also assign values to the document's fields using standard object
13 | attribute syntax::
14 |
15 | >>> page.title = "Example Page"
16 | >>> page.title
17 | 'Example Page'
18 |
19 | Saving and deleting documents
20 | =============================
21 | MongoEngine tracks changes to documents to provide efficient saving. To save
22 | the document to the database, call the :meth:`~mongoengine.Document.save` method.
23 | If the document does not exist in the database, it will be created. If it does
24 | already exist, then any changes will be updated atomically. For example::
25 |
26 | >>> page = Page(title="Test Page")
27 | >>> page.save() # Performs an insert
28 | >>> page.title = "My Page"
29 | >>> page.save() # Performs an atomic set on the title field.
30 |
31 | .. note::
32 |
33 | Changes to documents are tracked and on the whole perform `set` operations.
34 |
35 | * ``list_field.pop(0)`` - *sets* the resulting list
36 | * ``del(list_field)`` - *unsets* whole list
37 |
38 | .. seealso::
39 | :ref:`guide-atomic-updates`
40 |
41 | Cascading Saves
42 | ---------------
43 | If your document contains :class:`~mongoengine.ReferenceField` or
44 | :class:`~mongoengine.GenericReferenceField` objects, then by default the
45 | :meth:`~mongoengine.Document.save` method will automatically save any changes to
46 | those objects as well. If this is not desired passing :attr:`cascade` as False
47 | to the save method turns this feature off.
48 |
49 | Deleting documents
50 | ------------------
51 | To delete a document, call the :meth:`~mongoengine.Document.delete` method.
52 | Note that this will only work if the document exists in the database and has a
53 | valid :attr:`id`.
54 |
55 | Document IDs
56 | ============
57 | Each document in the database has a unique id. This may be accessed through the
58 | :attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is saved,
60 | meaning that you may only access the :attr:`id` field once a document has been
61 | saved::
62 |
63 | >>> page = Page(title="Test Page")
64 | >>> page.id
65 | >>> page.save()
66 | >>> page.id
67 | ObjectId('123456789abcdef000000000')
68 |
69 | Alternatively, you may define one of your own fields to be the document's
70 | "primary key" by providing ``primary_key=True`` as a keyword argument to a
71 | field's constructor. Under the hood, MongoEngine will use this field as the
72 | :attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so
73 | you may still use :attr:`id` to access the primary key if you want::
74 |
75 | >>> class User(Document):
76 | ... email = StringField(primary_key=True)
77 | ... name = StringField()
78 | ...
79 | >>> bob = User(email='bob@example.com', name='Bob')
80 | >>> bob.save()
81 | >>> bob.id == bob.email == 'bob@example.com'
82 | True
83 |
You can also access the document's "primary key" using the :attr:`pk` field; it
is an alias to :attr:`id`::
86 |
87 | >>> page = Page(title="Another Test Page")
88 | >>> page.save()
89 | >>> page.id == page.pk
90 |
91 | .. note::
92 |
93 | If you define your own primary key field, the field implicitly becomes
94 | required, so a :class:`~mongoengine.ValidationError` will be thrown if
95 | you don't provide it.
96 |
--------------------------------------------------------------------------------
/mongoengine/django/storage.py:
--------------------------------------------------------------------------------
1 | import os
2 | import itertools
3 | import urlparse
4 |
5 | from mongoengine import *
6 | from django.conf import settings
7 | from django.core.files.storage import Storage
8 | from django.core.exceptions import ImproperlyConfigured
9 |
10 |
class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    file = FileField()  # holds the file's contents and metadata in GridFS
15 |
16 |
class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS, wrapping each
    file in a :class:`FileDocument`.
    """

    def __init__(self, base_url=None):
        # Default to Django's MEDIA_URL so url() works out of the box.
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.

        Fix: previously this deleted the GridFS file belonging to an
        arbitrary *first* document while removing the named document's
        wrapper, leaving the named file's data orphaned and destroying an
        unrelated file. Now both the GridFS file and its wrapper document
        for `name` are removed.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            getattr(doc, self.field).delete()  # Delete the file from GridFS
            doc.delete()  # Delete the FileDocument wrapper

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists in the
        storage system, or False if the name is available for a new file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.
        GridFS has no directory hierarchy, so the first list is always empty.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.

        :raises ValueError: if no file with that name exists
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given name, or None.

        NOTE(review): this filters client-side over every FileDocument, so
        lookups are O(number of stored files).
        """
        docs = self.document.objects
        doc = [d for d in docs if getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        # `mode` is accepted for Storage API compatibility but ignored; the
        # returned GridFS proxy is what callers read from.
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = "%s_%s%s" % (file_root, count.next(), file_ext)

        return name

    def _save(self, name, content):
        # Wrap the content in a new FileDocument and persist it to GridFS.
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
113 |
--------------------------------------------------------------------------------
/tests/test_django.py:
--------------------------------------------------------------------------------
from __future__ import with_statement
import unittest
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import *

try:
    from mongoengine.django.shortcuts import get_document_or_404

    from django.http import Http404
    from django.template import Context, Template
    from django.conf import settings
    from django.core.paginator import Paginator

    # Bare settings are sufficient for template rendering in these tests.
    settings.configure()

    from django.contrib.sessions.tests import SessionTestsMixin
    from mongoengine.django.sessions import SessionStore, MongoSession
except Exception, err:
    # Django cannot be imported under Python 3 here; substitute placeholders
    # so this module still imports — the tests themselves are skipped in
    # each setUp(). On Python 2 an import failure is a real error.
    if PY3:
        SessionTestsMixin = type  # dummy value so no error
        SessionStore = None  # dummy value so no error
    else:
        raise err
25 |
26 |
class QuerySetTest(unittest.TestCase):
    """Integration tests for using MongoEngine querysets from Django
    templates, shortcuts and pagination."""

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def _create_people(self):
        # Reset the collection and insert a small, deliberately unordered
        # fixture set.
        self.Person.drop_collection()
        for name, age in (("A", 20), ("D", 10), ("B", 40), ("C", 30)):
            self.Person(name=name, age=age).save()

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self._create_people()

        template = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        cases = (
            ('-name', u'D-10:C-30:B-40:A-20:'),
            ('+name', u'A-20:B-40:C-30:D-10:'),
            ('-age', u'B-40:C-30:A-20:D-10:'),
            ('+age', u'D-10:A-20:C-30:B-40:'),
        )
        for ordering, expected in cases:
            context = Context({"ol": self.Person.objects.order_by(ordering)})
            self.assertEqual(template.render(context), expected)

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):
        self._create_people()

        template = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
        data = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}

        self.assertEqual(template.render(Context(data)), 'D-10:C-30:')
        # Check double rendering doesn't throw an error
        self.assertEqual(template.render(Context(data)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        person = self.Person(name="G404")
        person.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(person, get_document_or_404(self.Person, pk=person.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)
        template = Template(
            "{% for i in page.object_list %}{{ i.name }}:{% endfor %}")

        for number in paginator.page_range:
            rendered = template.render(Context({"page": paginator.page(number)}))
            first = number * 2 - 1
            self.assertEqual(rendered, u'%d:%d:' % (first, first + 1))
105 |
106 |
107 |
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    # Runs Django's stock session-backend test suite (SessionTestsMixin)
    # against the MongoDB-backed store.
    backend = SessionStore

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')
        # Start every test with an empty session collection.
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()

    def test_first_save(self):
        # A brand-new session must persist its data on the first save().
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)
--------------------------------------------------------------------------------
/docs/_themes/nature/static/nature.css_t:
--------------------------------------------------------------------------------
1 | /**
2 | * Sphinx stylesheet -- default theme
3 | * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 | */
5 |
6 | @import url("basic.css");
7 |
8 | /* -- page layout ----------------------------------------------------------- */
9 |
10 | body {
11 | font-family: Arial, sans-serif;
12 | font-size: 100%;
13 | background-color: #111;
14 | color: #555;
15 | margin: 0;
16 | padding: 0;
17 | }
18 |
19 | div.documentwrapper {
20 | float: left;
21 | width: 100%;
22 | }
23 |
24 | div.bodywrapper {
25 | margin: 0 0 0 230px;
26 | }
27 |
28 | hr{
29 | border: 1px solid #B1B4B6;
30 | }
31 |
32 | div.document {
33 | background-color: #eee;
34 | }
35 |
36 | div.body {
37 | background-color: #ffffff;
38 | color: #3E4349;
39 | padding: 0 30px 30px 30px;
40 | font-size: 0.8em;
41 | }
42 |
43 | div.footer {
44 | color: #555;
45 | width: 100%;
46 | padding: 13px 0;
47 | text-align: center;
48 | font-size: 75%;
49 | }
50 |
51 | div.footer a {
52 | color: #444;
53 | text-decoration: underline;
54 | }
55 |
56 | div.related {
57 | background-color: #6BA81E;
58 | line-height: 32px;
59 | color: #fff;
60 | text-shadow: 0px 1px 0 #444;
61 | font-size: 0.80em;
62 | }
63 |
64 | div.related a {
65 | color: #E2F3CC;
66 | }
67 |
68 | div.sphinxsidebar {
69 | font-size: 0.75em;
70 | line-height: 1.5em;
71 | }
72 |
73 | div.sphinxsidebarwrapper{
74 | padding: 20px 0;
75 | }
76 |
77 | div.sphinxsidebar h3,
78 | div.sphinxsidebar h4 {
79 | font-family: Arial, sans-serif;
80 | color: #222;
81 | font-size: 1.2em;
82 | font-weight: normal;
83 | margin: 0;
84 | padding: 5px 10px;
85 | background-color: #ddd;
86 | text-shadow: 1px 1px 0 white
87 | }
88 |
89 | div.sphinxsidebar h4{
90 | font-size: 1.1em;
91 | }
92 |
93 | div.sphinxsidebar h3 a {
94 | color: #444;
95 | }
96 |
97 |
98 | div.sphinxsidebar p {
99 | color: #888;
100 | padding: 5px 20px;
101 | }
102 |
103 | div.sphinxsidebar p.topless {
104 | }
105 |
106 | div.sphinxsidebar ul {
107 | margin: 10px 20px;
108 | padding: 0;
109 | color: #000;
110 | }
111 |
112 | div.sphinxsidebar a {
113 | color: #444;
114 | }
115 |
116 | div.sphinxsidebar input {
117 | border: 1px solid #ccc;
118 | font-family: sans-serif;
119 | font-size: 1em;
120 | }
121 |
122 | div.sphinxsidebar input[type=text]{
123 | margin-left: 20px;
124 | }
125 |
126 | /* -- body styles ----------------------------------------------------------- */
127 |
128 | a {
129 | color: #005B81;
130 | text-decoration: none;
131 | }
132 |
133 | a:hover {
134 | color: #E32E00;
135 | text-decoration: underline;
136 | }
137 |
138 | div.body h1,
139 | div.body h2,
140 | div.body h3,
141 | div.body h4,
142 | div.body h5,
143 | div.body h6 {
144 | font-family: Arial, sans-serif;
145 | background-color: #BED4EB;
146 | font-weight: normal;
147 | color: #212224;
148 | margin: 30px 0px 10px 0px;
149 | padding: 5px 0 5px 10px;
150 | text-shadow: 0px 1px 0 white
151 | }
152 |
153 | div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
154 | div.body h2 { font-size: 150%; background-color: #C8D5E3; }
155 | div.body h3 { font-size: 120%; background-color: #D8DEE3; }
156 | div.body h4 { font-size: 110%; background-color: #D8DEE3; }
157 | div.body h5 { font-size: 100%; background-color: #D8DEE3; }
158 | div.body h6 { font-size: 100%; background-color: #D8DEE3; }
159 |
160 | a.headerlink {
161 | color: #c60f0f;
162 | font-size: 0.8em;
163 | padding: 0 4px 0 4px;
164 | text-decoration: none;
165 | }
166 |
167 | a.headerlink:hover {
168 | background-color: #c60f0f;
169 | color: white;
170 | }
171 |
172 | div.body p, div.body dd, div.body li {
173 | line-height: 1.5em;
174 | }
175 |
176 | div.admonition p.admonition-title + p {
177 | display: inline;
178 | }
179 |
180 | div.highlight{
181 | background-color: white;
182 | }
183 |
184 | div.note {
185 | background-color: #eee;
186 | border: 1px solid #ccc;
187 | }
188 |
189 | div.seealso {
190 | background-color: #ffc;
191 | border: 1px solid #ff6;
192 | }
193 |
194 | div.topic {
195 | background-color: #eee;
196 | }
197 |
198 | div.warning {
199 | background-color: #ffe4e4;
200 | border: 1px solid #f66;
201 | }
202 |
203 | p.admonition-title {
204 | display: inline;
205 | }
206 |
207 | p.admonition-title:after {
208 | content: ":";
209 | }
210 |
211 | pre {
212 | padding: 10px;
213 | background-color: White;
214 | color: #222;
215 | line-height: 1.2em;
216 | border: 1px solid #C6C9CB;
217 | font-size: 1.2em;
218 | margin: 1.5em 0 1.5em 0;
219 | -webkit-box-shadow: 1px 1px 1px #d8d8d8;
220 | -moz-box-shadow: 1px 1px 1px #d8d8d8;
221 | }
222 |
223 | tt {
224 | background-color: #ecf0f3;
225 | color: #222;
226 | padding: 1px 2px;
227 | font-size: 1.2em;
228 | font-family: monospace;
229 | }
230 |
--------------------------------------------------------------------------------
/docs/upgrade.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Upgrading
3 | =========
4 |
5 | 0.6 to 0.7
6 | ==========
7 |
8 | Cascade saves
9 | -------------
10 |
11 | Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
12 | to True. This is because in 0.8 it will default to False. If you require
13 | cascading saves then either set it in the `meta` or pass
14 | via `save` eg ::
15 |
16 | # At the class level:
17 | class Person(Document):
18 | meta = {'cascade': True}
19 |
20 | # Or in code:
21 | my_document.save(cascade=True)
22 |
23 | .. note ::
24 | Remember: cascading saves **do not** cascade through lists.
25 |
26 | ReferenceFields
27 | ---------------
28 |
29 | ReferenceFields now can store references as ObjectId strings instead of DBRefs.
30 | This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
31 | will be raised.
32 |
33 |
34 | To explicitly continue to use DBRefs change the `dbref` flag
35 | to True ::
36 |
37 | class Person(Document):
38 | groups = ListField(ReferenceField(Group, dbref=True))
39 |
40 | To migrate to using strings instead of DBRefs you will have to manually
41 | migrate ::
42 |
43 | # Step 1 - Migrate the model definition
44 | class Group(Document):
45 | author = ReferenceField(User, dbref=False)
46 | members = ListField(ReferenceField(User, dbref=False))
47 |
48 | # Step 2 - Migrate the data
49 | for g in Group.objects():
50 | g.author = g.author
51 | g.members = g.members
52 | g.save()
53 |
54 |
55 | item_frequencies
56 | ----------------
57 |
58 | In the 0.6 series we added support for null / zero / false values in
59 | item_frequencies. A side effect was to return keys in the value they are
60 | stored in rather than as string representations. Your code may need to be
61 | updated to handle native types rather than strings keys for the results of
62 | item frequency queries.
63 |
64 | BinaryFields
65 | ------------
66 |
67 | Binary fields have been updated so that they are native binary types. If you
68 | previously were doing `str` comparisons with binary field values you will have
69 | to update and wrap the value in a `str`.
70 |
71 | 0.5 to 0.6
72 | ==========
73 |
74 | Embedded Documents - if you had a `pk` field you will have to rename it from
75 | `_id` to `pk` as pk is no longer a property of Embedded Documents.
76 |
77 | Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
78 | an InvalidDocument error as they aren't currently supported.
79 |
80 | Document._get_subclasses - Is no longer used and the class method has been
81 | removed.
82 |
83 | Document.objects.with_id - now raises an InvalidQueryError if used with a
84 | filter.
85 |
86 | FutureWarning - A future warning has been added to all inherited classes that
87 | don't define `allow_inheritance` in their meta.
88 |
89 | You may need to update pyMongo to 2.0 for use with Sharding.
90 |
91 | 0.4 to 0.5
92 | ===========
93 |
94 | There have been the following backwards incompatibilities from 0.4 to 0.5. The
95 | main areas of changed are: choices in fields, map_reduce and collection names.
96 |
97 | Choice options:
98 | ---------------
99 |
100 | Are now expected to be an iterable of tuples, with the first element in each
101 | tuple being the actual value to be stored. The second element is the
102 | human-readable name for the option.
103 |
104 |
105 | PyMongo / MongoDB
106 | -----------------
107 |
map reduce now requires pymongo 1.11+. The pymongo `merge_output` and
`reduce_output` parameters have been deprecated.
110 |
111 | More methods now use map_reduce as db.eval is not supported for sharding as
112 | such the following have been changed:
113 |
114 | * :meth:`~mongoengine.queryset.QuerySet.sum`
115 | * :meth:`~mongoengine.queryset.QuerySet.average`
116 | * :meth:`~mongoengine.queryset.QuerySet.item_frequencies`
117 |
118 |
119 | Default collection naming
120 | -------------------------
121 |
Previously it was just lowercase; it's now much more pythonic and readable, as
it's lowercase with underscores. Previously ::
124 |
125 | class MyAceDocument(Document):
126 | pass
127 |
128 | MyAceDocument._meta['collection'] == myacedocument
129 |
130 | In 0.5 this will change to ::
131 |
132 | class MyAceDocument(Document):
133 | pass
134 |
135 | MyAceDocument._get_collection_name() == my_ace_document
136 |
137 | To upgrade use a Mixin class to set meta like so ::
138 |
139 | class BaseMixin(object):
140 | meta = {
141 | 'collection': lambda c: c.__name__.lower()
142 | }
143 |
144 | class MyAceDocument(Document, BaseMixin):
145 | pass
146 |
147 | MyAceDocument._get_collection_name() == "myacedocument"
148 |
149 | Alternatively, you can rename your collections eg ::
150 |
151 | from mongoengine.connection import _get_db
152 | from mongoengine.base import _document_registry
153 |
154 | def rename_collections():
155 | db = _get_db()
156 |
157 | failure = False
158 |
159 | collection_names = [d._get_collection_name()
160 | for d in _document_registry.values()]
161 |
162 | for new_style_name in collection_names:
163 | if not new_style_name: # embedded documents don't have collections
164 | continue
165 | old_style_name = new_style_name.replace('_', '')
166 |
167 | if old_style_name == new_style_name:
168 | continue # Nothing to do
169 |
170 | existing = db.collection_names()
171 | if old_style_name in existing:
172 | if new_style_name in existing:
173 | failure = True
174 | print "FAILED to rename: %s to %s (already exists)" % (
175 | old_style_name, new_style_name)
176 | else:
177 | db[old_style_name].rename(new_style_name)
178 | print "Renamed: %s to %s" % (old_style_name,
179 | new_style_name)
180 |
181 | if failure:
182 | print "Upgrading collection names failed"
183 | else:
184 | print "Upgraded collection names"
185 |
186 |
--------------------------------------------------------------------------------
/mongoengine/connection.py:
--------------------------------------------------------------------------------
1 | import pymongo
2 | from pymongo import Connection, ReplicaSetConnection, uri_parser
3 |
4 |
# Public API of this module.
__all__ = ['ConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']


# Alias used when a caller does not name a connection explicitly.
DEFAULT_CONNECTION_NAME = 'default'
10 |
11 |
class ConnectionError(Exception):
    """Raised when a MongoDB connection cannot be established or when a
    requested connection alias has not been registered."""
14 |
15 |
# Registered connection settings, live pymongo connections, and database
# handles — all keyed by connection alias.
_connection_settings = {}
_connections = {}
_dbs = {}
19 |
20 |
def register_connection(alias, name, host='localhost', port=27017,
                        is_slave=False, read_preference=False, slaves=None,
                        username=None, password=None, **kwargs):
    """Record the settings for a named connection (no connection is opened
    until :func:`get_connection` is called).

    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param is_slave: whether the connection can act as a slave
        ** Deprecated pymongo 2.0.1+
    :param read_preference: The read preference for the collection
        ** Added pymongo 2.1
    :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
    """
    global _connection_settings

    conn_settings = dict(
        name=name,
        host=host,
        port=port,
        is_slave=is_slave,
        slaves=slaves or [],
        username=username,
        password=password,
        read_preference=read_preference,
    )

    # A "scheme://" host is a MongoDB URI: the database name and any
    # credentials embedded in it override the plain settings above.
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "
                                  "database name in string")
        conn_settings['name'] = uri_dict.get('database')
        conn_settings['username'] = uri_dict.get('username')
        conn_settings['password'] = uri_dict.get('password')
        if "replicaSet" in host:
            conn_settings['replicaSet'] = True

    # Ad-hoc driver options take precedence over everything else.
    conn_settings.update(kwargs)
    _connection_settings[alias] = conn_settings
71 |
72 |
def disconnect(alias=DEFAULT_CONNECTION_NAME):
    """Close the connection registered under *alias* (if one is open) and
    drop its cached connection and database handles."""
    global _connections
    global _dbs

    if alias in _connections:
        _connections[alias].disconnect()
        del _connections[alias]
    _dbs.pop(alias, None)
82 |
83 |
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    """Return the (cached) pymongo connection for *alias*, creating it on
    first use from the settings stored by :func:`register_connection`.

    :param alias: which registered connection to return
    :param reconnect: drop any cached connection for *alias* first
    :raises ConnectionError: if *alias* was never registered or the driver
        fails to connect
    """
    global _connections
    # Connect to the database if not already connected
    if reconnect:
        disconnect(alias)

    if alias not in _connections:
        if alias not in _connection_settings:
            msg = 'Connection with alias "%s" has not been defined' % alias
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
        # Copy so the per-call mutations below don't corrupt the registry.
        conn_settings = _connection_settings[alias].copy()

        if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
            # pymongo 2.1+ constructors reject these kwargs; they are
            # consumed at the database level (see get_db) instead.
            conn_settings.pop('name', None)
            conn_settings.pop('slaves', None)
            conn_settings.pop('is_slave', None)
            conn_settings.pop('username', None)
            conn_settings.pop('password', None)
        else:
            # Get all the slave connections
            if 'slaves' in conn_settings:
                slaves = []
                for slave_alias in conn_settings['slaves']:
                    slaves.append(get_connection(slave_alias))
                conn_settings['slaves'] = slaves
            # read_preference only exists in pymongo 2.1+.
            conn_settings.pop('read_preference', None)

        connection_class = Connection
        if 'replicaSet' in conn_settings:
            # ReplicaSetConnection takes hosts_or_uri rather than host.
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
            # Discard port since it can't be used on ReplicaSetConnection
            conn_settings.pop('port', None)
            # Discard replicaSet if not base string
            if not isinstance(conn_settings['replicaSet'], basestring):
                conn_settings.pop('replicaSet', None)
            connection_class = ReplicaSetConnection

        try:
            _connections[alias] = connection_class(**conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]
128 |
129 |
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    """Return the (cached) pymongo database for *alias*, connecting lazily
    and authenticating when credentials were registered."""
    global _dbs
    if reconnect:
        disconnect(alias)

    if alias not in _dbs:
        cfg = _connection_settings[alias]
        database = get_connection(alias)[cfg['name']]
        _dbs[alias] = database
        # Authenticate if necessary
        if cfg['username'] and cfg['password']:
            database.authenticate(cfg['username'], cfg['password'])
    return _dbs[alias]
144 |
145 |
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not
    running on the default port on localhost. If authentication is needed,
    provide username and password arguments as well.

    Multiple databases are supported by using aliases. Provide a separate
    `alias` to connect to a different instance of :program:`mongod`.

    .. versionchanged:: 0.6 - added multiple database support.
    """
    # Settings are only registered the first time an alias is seen; later
    # calls for the same alias just return the cached connection.
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

    return get_connection(alias)
163 |
# Support old naming convention: backwards-compatible aliases for the
# historical underscore-prefixed helper names.
_get_connection = get_connection
_get_db = get_db
167 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
# MongoEngine documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# (The repository root is added so that `import mongoengine` further down picks
# up the in-tree package.)
sys.path.append(os.path.abspath('..'))

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'MongoEngine'
copyright = u'2009-2012, MongoEngine Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# Both values are read from the package itself so the built docs always match
# the code being documented.
import mongoengine
# The short X.Y version.
version = mongoengine.get_version()
# The full version, including alpha/beta/rc tags.
release = mongoengine.get_version()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# 'nature' is a custom theme shipped in docs/_themes (see html_theme_path).
html_theme = 'nature'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']

# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'MongoEnginedoc'


# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'MongoEngine.tex', u'MongoEngine Documentation',
   u'Harry Marr', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
--------------------------------------------------------------------------------
/benchmark.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import timeit
4 |
5 |
def cprofile_main():
    """Entry point intended for profiling (named for running under cProfile):
    resets the 'timeit_test' database, then builds and saves one Noddy
    document carrying twenty dict entries through MongoEngine.
    """
    from pymongo import Connection
    conn = Connection()
    conn.drop_database('timeit_test')
    conn.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for _ in xrange(1):
        doc = Noddy()
        for n in range(20):
            doc.fields["key" + str(n)] = "value " + str(n)
        doc.save()
23 |
24 |
def main():
    """
    0.4 Performance Figures ...

    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    3.86744189262
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    6.23374891281
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    5.33027005196
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    pass - No Cascade

    0.5.X
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    3.89597702026
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    21.7735359669
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    19.8670389652
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    pass - No Cascade

    0.6.X
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    3.81559205055
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    10.0446798801
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    9.51354718208
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    9.02567505836
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    8.44933390617

    0.7.X
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    3.78801012039
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    9.73050498962
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    8.33456707001
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    8.37778115273
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    8.36906409264
    """

    def run_benchmark(label, stmt, setup):
        # Print a separator and the benchmark label, then time exactly one
        # run of `stmt` (a source string executed by timeit after `setup`).
        # print(x) with a single argument behaves identically under the
        # Python 2 print statement and the Python 3 print function.
        print("-" * 100)
        print(label)
        timer = timeit.Timer(stmt=stmt, setup=setup)
        print(timer.timeit(1))

    # --- Raw pymongo baseline -------------------------------------------
    pymongo_setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
"""

    pymongo_stmt = """
from pymongo import Connection
connection = Connection()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert(example)

myNoddys = noddy.find()
[n for n in myNoddys] # iterate
"""

    run_benchmark("Creating 10000 dictionaries - Pymongo",
                  pymongo_stmt, pymongo_setup)

    # --- MongoEngine variants: same workload, different save() options --
    mongoengine_setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
connection.disconnect()

from mongoengine import Document, DictField, connect
connect("timeit_test")

class Noddy(Document):
    fields = DictField()
"""

    # %s is substituted with the keyword arguments passed to save().
    mongoengine_stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(%s)

myNoddys = Noddy.objects()
[n for n in myNoddys] # iterate
"""

    run_benchmark("Creating 10000 dictionaries - MongoEngine",
                  mongoengine_stmt % "",
                  mongoengine_setup)
    run_benchmark("Creating 10000 dictionaries - MongoEngine, safe=False, validate=False",
                  mongoengine_stmt % "safe=False, validate=False",
                  mongoengine_setup)
    run_benchmark("Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False",
                  mongoengine_stmt % "safe=False, validate=False, cascade=False",
                  mongoengine_setup)
    run_benchmark("Creating 10000 dictionaries - MongoEngine, force=True",
                  mongoengine_stmt % "force_insert=True, safe=False, validate=False, cascade=False",
                  mongoengine_setup)
197 |
# Run the timeit benchmarks when executed directly as a script.
if __name__ == "__main__":
    main()
200 |
--------------------------------------------------------------------------------
/mongoengine/django/auth.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from mongoengine import *
4 |
5 | from django.utils.encoding import smart_str
6 | from django.contrib.auth.models import _user_get_all_permissions
7 | from django.contrib.auth.models import _user_has_perm
8 | from django.contrib.auth.models import AnonymousUser
9 | from django.utils.translation import ugettext_lazy as _
10 |
try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    # Older Django (< 1.4) has no django.contrib.auth.hashers; provide
    # API-compatible check_password/make_password built on the legacy
    # django.utils.hashcompat digest constructors.
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        """Return the hex digest of salt + raw_password using the given
        algorithm ('md5' or 'sha1'); raise ValueError otherwise.
        """
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        """Check raw_password against a stored 'algo$salt$hexdigest' value."""
        # `hexdigest` avoids shadowing the builtin `hash`.
        algo, salt, hexdigest = password.split('$')
        return hexdigest == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        """Hash raw_password into 'algo$salt$hexdigest' form using sha1 and
        a random 5-character salt.
        """
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hexdigest = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hexdigest)
35 |
36 |
# Query-string parameter carrying the post-login redirect target; same value
# as Django's own django.contrib.auth.REDIRECT_FIELD_NAME.
REDIRECT_FIELD_NAME = 'next'
38 |
class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    # verbose_name/help_text values are lazily-translated, user-facing strings.
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    # Stores the *hashed* password - see set_password()/check_password().
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the change password form."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime.datetime.now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime.datetime.now,
                                verbose_name=_('date joined'))

    # Usernames are enforced unique via an index; inheritance is allowed so
    # applications can subclass User with extra fields.
    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the users first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        # A stored User is never anonymous (Django auth API compatibility).
        return False

    def is_authenticated(self):
        # Always True for a real user object (Django auth API compatibility).
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.

        Note: this method also saves the document.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    def get_all_permissions(self, obj=None):
        # Delegate to Django's backend-aware permission aggregation helper.
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """

        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime.datetime.now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                # No '@' present: keep the address as supplied.
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        # NOTE(review): set_password() already saved the document above, so
        # this second save() appears redundant (one extra write) - confirm.
        user.save()
        return user

    def get_and_delete_messages(self):
        # Django message-API compatibility stub; no messages are stored.
        return []
152 |
153 |
class MongoEngineBackend(object):
    """Django authentication backend backed by MongoEngine's
    mongoengine.django.auth.User document.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        # Guard-clause style: bail out as soon as authentication can't succeed.
        candidate = User.objects(username=username).first()
        if not candidate:
            return None
        if not password:
            return None
        if not candidate.check_password(password):
            return None
        return candidate

    def get_user(self, user_id):
        return User.objects.with_id(user_id)
171 |
172 |
def get_user(userid):
    """Resolve a user id (User.id) to a User, falling back to AnonymousUser.

    Django's equivalent takes a request; accepting a bare id instead leaves
    it up to the developer to store the id any way they want (session,
    signed cookie, etc.).
    """
    if userid:
        found = MongoEngineBackend().get_user(userid)
        if found:
            return found
    return AnonymousUser()
181 |
--------------------------------------------------------------------------------
/tests/test_signals.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import unittest
3 |
4 | from mongoengine import *
5 | from mongoengine import signals
6 |
# Module-level buffer the signal handlers below append their messages to;
# SignalTests.get_signal_output rebinds it to [] before each checked call.
signal_output = []
8 |
9 |
class SignalTests(unittest.TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        # Return whatever the handlers appended while fn() ran.
        return signal_output

    def setUp(self):
        connect(db='mongoenginetest')
        # Document with a handler for every signal, including bulk inserts.
        class Author(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, *args, **kwargs):
                signal_output.append('pre_init signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

            @classmethod
            def pre_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('pre_bulk_insert signal, %s' % documents)

            @classmethod
            def post_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('post_bulk_insert signal, %s' % documents)
                if kwargs.get('loaded', False):
                    signal_output.append('Is loaded')
                else:
                    signal_output.append('Not loaded')
        self.Author = Author


        # Second document class, used to verify handlers only fire for the
        # sender they were connected with.
        class Another(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, **kwargs):
                signal_output.append('pre_init Another signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init Another signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save Another signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save Another signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete Another signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete Another signal, %s' % document)

        self.Another = Another
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered
        self.pre_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

        # Note: Another has no bulk-insert handlers connected.
        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
        signals.pre_save.connect(Another.pre_save, sender=Another)
        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)

    def tearDown(self):
        signals.pre_init.disconnect(self.Author.pre_init)
        signals.post_init.disconnect(self.Author.post_init)
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
        signals.post_save.disconnect(self.Another.post_save)
        signals.pre_save.disconnect(self.Another.pre_save)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """ Model saves should throw some signals. """

        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        def bulk_create_author_with_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=True)

        def bulk_create_author_without_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=False)

        # Construction alone fires only the init signals.
        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
        ])

        a1 = self.Author(name='Bill Shakespeare')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Bill Shakespeare",
            "post_save signal, Bill Shakespeare",
            "Is created"
        ])

        a1.reload()
        a1.name='William Shakespeare'
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, William Shakespeare",
            "post_save signal, William Shakespeare",
            "Is updated"
        ])

        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])

        signal_output = self.get_signal_output(bulk_create_author_with_load)

        # The output of this signal is not entirely deterministic. The reloaded
        # object will have an object ID. Hence, we only check part of the output
        self.assertEqual(signal_output[3],
            "pre_bulk_insert signal, []")
        self.assertEqual(signal_output[-2:],
            ["post_bulk_insert signal, []",
             "Is loaded",])

        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
            "pre_bulk_insert signal, []",
            "post_bulk_insert signal, []",
            "Not loaded",
        ])

        # Clean up the documents created by the bulk inserts above.
        self.Author.objects.delete()
231 |
--------------------------------------------------------------------------------
/mongoengine/dereference.py:
--------------------------------------------------------------------------------
1 | from bson import DBRef, SON
2 |
3 | from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
4 | from fields import (ReferenceField, ListField, DictField, MapField)
5 | from connection import get_db
6 | from queryset import QuerySet
7 | from document import Document
8 |
9 |
10 | class DeReference(object):
11 |
    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the conversion of complex data types.

        :param items: The iterable (dict, list, queryset) to be dereferenced.
        :param max_depth: The maximum depth to recurse to
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        """
        # Strings and None can never contain references - return untouched.
        if items is None or isinstance(items, basestring):
            return items

        # cheapest way to convert a queryset to a list
        # list(queryset) uses a count() query to determine length
        if isinstance(items, QuerySet):
            items = [i for i in items]

        self.max_depth = max_depth
        doc_type = None

        # Work out the document type held by the named field, unwrapping
        # container fields (e.g. ListField(ReferenceField(...))) via .field.
        if instance and instance._fields:
            doc_type = instance._fields.get(name)
            if hasattr(doc_type, 'field'):
                doc_type = doc_type.field

        if isinstance(doc_type, ReferenceField):
            field = doc_type
            doc_type = doc_type.document_type
            # dict-likes expose .items(); anything else is treated as a list.
            is_list = not hasattr(items, 'items')

            # Everything is already a document instance - nothing to do.
            if is_list and all([i.__class__ == doc_type for i in items]):
                return items
            elif not is_list and all([i.__class__ == doc_type
                                     for i in items.values()]):
                return items
            elif not field.dbref:
                # Field stores bare ids rather than DBRefs: run raw values
                # through the field so the lookup below sees DBRefs.
                if not hasattr(items, 'items'):
                    items = [field.to_python(v)
                         if not isinstance(v, (DBRef, Document)) else v
                         for v in items]
                else:
                    items = dict([
                        (k, field.to_python(v))
                        if not isinstance(v, (DBRef, Document)) else (k, v)
                        for k, v in items.iteritems()]
                    )

        # Collect the refs, fetch them in bulk, then splice the fetched
        # documents back into the original structure.
        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name)
66 |
    def _find_references(self, items, depth=0):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        :returns: a mapping of document class (or collection name, for plain
            DBRefs) to the list of ids that need fetching
        """
        reference_map = {}
        if not items or depth >= self.max_depth:
            return reference_map

        # Determine the iterator to use
        if not hasattr(items, 'items'):
            iterator = enumerate(items)
        else:
            iterator = items.iteritems()

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if hasattr(item, '_fields'):
                # Item is itself a document: scan each field's raw data.
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        # Generic reference stored as {'_cls': ..., '_ref': DBRef}.
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                        references = self._find_references(v, depth)
                        for key, refs in references.iteritems():
                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                # Typed container field: group the refs under
                                # its document class instead of the found key.
                                key = field_cls
                            reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                # NOTE(review): recurses with depth - 1, i.e. a bare nested
                # container does not consume a level of max_depth (unlike the
                # document-field branch above) - confirm this is intentional.
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)

        return reference_map
111 |
    def _fetch_objects(self, doc_type=None):
        """Fetch all references and convert to their document objects

        Issues one bulk query per entry in ``self.reference_map`` and
        returns a mapping of referenced id -> loaded document.

        :param doc_type: optional document class (or complex field) used to
            resolve references for which no class was recorded in the map
        """
        object_map = {}
        for col, dbrefs in self.reference_map.iteritems():
            # NOTE(review): `keys` holds document ids while `str(dbref)` is
            # the DBRef's string form - it is unclear this membership test
            # can ever exclude anything; confirm the intended de-duplication
            keys = object_map.keys()
            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
            if hasattr(col, 'objects'):  # We have a document class for the refs
                references = col.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
                    # A concrete document class was supplied - query its db
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                else:
                    # No usable class hint - query the default db directly
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            # The raw data records its own class - use it
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            # Last resort: derive the class name from the
                            # collection name, e.g. 'text_post' -> 'TextPost'
                            doc = get_document(
                                ''.join(x.capitalize()
                                    for x in col.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
        return object_map
142 |
    def _attach_objects(self, items, depth=0, instance=None, name=None):
        """
        Recursively replaces db references in ``items`` with the documents
        previously fetched into ``self.object_map``.

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :returns: ``items`` with references swapped for documents; wrapped in
            BaseDict/BaseList when an owning instance is supplied
        """
        if not items:
            # Empty input: still wrap it so change tracking keeps working
            if isinstance(items, (BaseDict, BaseList)):
                return items

            if instance:
                if isinstance(items, dict):
                    return BaseDict(items, instance, name)
                else:
                    return BaseList(items, instance, name)

        if isinstance(items, (dict, SON)):
            if '_ref' in items:
                # Generic-reference dict: swap for the fetched document,
                # falling back to the raw dict if it wasn't fetched
                return self.object_map.get(items['_ref'].id, items)
            elif '_types' in items and '_cls' in items:
                # Raw embedded-document data: materialise it, then attach
                # objects inside its own data dict
                doc = get_document(items['_cls'])._from_son(items)
                doc._data = self._attach_objects(doc._data, depth, doc, None)
                return doc

        # Build a parallel container (list or dict) to hold the results
        if not hasattr(items, 'items'):
            is_list = True
            iterator = enumerate(items)
            data = []
        else:
            is_list = False
            iterator = items.iteritems()
            data = {}

        depth += 1
        for k, v in iterator:
            if is_list:
                data.append(v)
            else:
                data[k] = v

            if k in self.object_map and not is_list:
                # Dict key itself maps to a fetched document
                data[k] = self.object_map[k]
            elif hasattr(v, '_fields'):
                # v is a document: patch each of its fields in place
                for field_name, field in v._fields.iteritems():
                    # NOTE(review): this rebinds `v`, shadowing the outer
                    # loop variable for the remainder of this iteration
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        data[k]._data[field_name] = self.object_map.get(v.id, v)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                    elif isinstance(v, dict) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                # Plain container: recurse without consuming a depth level
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

        if instance and name:
            # Wrap so the owning instance can track mutations to the result
            if is_list:
                return BaseList(data, instance, name)
            return BaseDict(data, instance, name)
        # NOTE(review): this increment has no effect - `depth` is a local
        # and `data` is returned immediately afterwards
        depth += 1
        return data
212 |
--------------------------------------------------------------------------------
/docs/tutorial.rst:
--------------------------------------------------------------------------------
1 | ========
2 | Tutorial
3 | ========
4 | This tutorial introduces **MongoEngine** by means of example --- we will walk
5 | through how to create a simple **Tumblelog** application. A Tumblelog is a type
6 | of blog where posts are not constrained to being conventional text-based posts.
7 | As well as text-based entries, users may post images, links, videos, etc. For
8 | simplicity's sake, we'll stick to text, image and link entries in our
9 | application. As the purpose of this tutorial is to introduce MongoEngine, we'll
10 | focus on the data-modelling side of the application, leaving out a user
11 | interface.
12 |
13 | Getting started
14 | ===============
15 | Before we start, make sure that a copy of MongoDB is running in an accessible
16 | location --- running it locally will be easier, but if that is not an option
17 | then it may be run on a remote server.
18 |
19 | Before we can start using MongoEngine, we need to tell it how to connect to our
20 | instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
21 | function. The only argument we need to provide is the name of the MongoDB
22 | database to use::
23 |
24 | from mongoengine import *
25 |
26 | connect('tumblelog')
27 |
28 | For more information about connecting to MongoDB see :ref:`guide-connecting`.
29 |
30 | Defining our documents
31 | ======================
32 | MongoDB is *schemaless*, which means that no schema is enforced by the database
33 | --- we may add and remove fields however we want and MongoDB won't complain.
34 | This makes life a lot easier in many regards, especially when there is a change
35 | to the data model. However, defining schemata for our documents can help to
36 | iron out bugs involving incorrect types or missing fields, and also allow us to
37 | define utility methods on our documents in the same way that traditional
38 | :abbr:`ORMs (Object-Relational Mappers)` do.
39 |
40 | In our Tumblelog application we need to store several different types of
41 | information. We will need to have a collection of **users**, so that we may
42 | link posts to an individual. We also need to store our different types of
43 | **posts** (text, image and link) in the database. To aid navigation of our
44 | Tumblelog, posts may have **tags** associated with them, so that the list of
45 | posts shown to the user may be limited to posts that have been assigned a
46 | specified tag. Finally, it would be nice if **comments** could be added to
47 | posts. We'll start with **users**, as the others are slightly more involved.
48 |
49 | Users
50 | -----
51 | Just as if we were using a relational database with an ORM, we need to define
52 | which fields a :class:`User` may have, and what their types will be::
53 |
54 | class User(Document):
55 | email = StringField(required=True)
56 | first_name = StringField(max_length=50)
57 | last_name = StringField(max_length=50)
58 |
59 | This looks similar to how the structure of a table would be defined in a
60 | regular ORM. The key difference is that this schema will never be passed on to
61 | MongoDB --- this will only be enforced at the application level. Also, the User
62 | documents will be stored in a MongoDB *collection* rather than a table.
63 |
64 | Posts, Comments and Tags
65 | ------------------------
66 | Now we'll think about how to store the rest of the information. If we were
67 | using a relational database, we would most likely have a table of **posts**, a
68 | table of **comments** and a table of **tags**. To associate the comments with
69 | individual posts, we would put a column in the comments table that contained a
70 | foreign key to the posts table. We'd also need a link table to provide the
71 | many-to-many relationship between posts and tags. Then we'd need to address the
72 | problem of storing the specialised post-types (text, image and link). There are
73 | several ways we can achieve this, but each of them have their problems --- none
74 | of them stand out as particularly intuitive solutions.
75 |
76 | Posts
77 | ^^^^^
78 | But MongoDB *isn't* a relational database, so we're not going to do it that
79 | way. As it turns out, we can use MongoDB's schemaless nature to provide us with
80 | a much nicer solution. We will store all of the posts in *one collection* ---
81 | each post type will just have the fields it needs. If we later want to add
82 | video posts, we don't have to modify the collection at all, we just *start
83 | using* the new fields we need to support video posts. This fits with the
84 | Object-Oriented principle of *inheritance* nicely. We can think of
85 | :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
86 | :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
87 | this kind of modelling out of the box::
88 |
89 | class Post(Document):
90 | title = StringField(max_length=120, required=True)
91 | author = ReferenceField(User)
92 |
93 | class TextPost(Post):
94 | content = StringField()
95 |
96 | class ImagePost(Post):
97 | image_path = StringField()
98 |
99 | class LinkPost(Post):
100 | link_url = StringField()
101 |
102 | We are storing a reference to the author of the posts using a
103 | :class:`~mongoengine.ReferenceField` object. These are similar to foreign key
104 | fields in traditional ORMs, and are automatically translated into references
105 | when they are saved, and dereferenced when they are loaded.
106 |
107 | Tags
108 | ^^^^
109 | Now that we have our Post models figured out, how will we attach tags to them?
110 | MongoDB allows us to store lists of items natively, so rather than having a
111 | link table, we can just store a list of tags in each post. So, for both
112 | efficiency and simplicity's sake, we'll store the tags as strings directly
113 | within the post, rather than storing references to tags in a separate
114 | collection. Especially as tags are generally very short (often even shorter
115 | than a document's id), this denormalisation won't impact very strongly on the
116 | size of our database. So let's take a look at the code for our modified
117 | :class:`Post` class::
118 |
119 | class Post(Document):
120 | title = StringField(max_length=120, required=True)
121 | author = ReferenceField(User)
122 | tags = ListField(StringField(max_length=30))
123 |
124 | The :class:`~mongoengine.ListField` object that is used to define a Post's tags
125 | takes a field object as its first argument --- this means that you can have
126 | lists of any type of field (including lists). Note that we don't need to
127 | modify the specialised post types as they all inherit from :class:`Post`.
128 |
129 | Comments
130 | ^^^^^^^^
131 | A comment is typically associated with *one* post. In a relational database, to
132 | display a post with its comments, we would have to retrieve the post from the
133 | database, then query the database again for the comments associated with the
134 | post. This works, but there is no real reason to be storing the comments
135 | separately from their associated posts, other than to work around the
136 | relational model. Using MongoDB we can store the comments as a list of
137 | *embedded documents* directly on a post document. An embedded document should
138 | be treated no differently than a regular document; it just doesn't have its own
139 | collection in the database. Using MongoEngine, we can define the structure of
140 | embedded documents, along with utility methods, in exactly the same way we do
141 | with regular documents::
142 |
143 | class Comment(EmbeddedDocument):
144 | content = StringField()
145 | name = StringField(max_length=120)
146 |
147 | We can then store a list of comment documents in our post document::
148 |
149 | class Post(Document):
150 | title = StringField(max_length=120, required=True)
151 | author = ReferenceField(User)
152 | tags = ListField(StringField(max_length=30))
153 | comments = ListField(EmbeddedDocumentField(Comment))
154 |
155 | Handling deletions of references
156 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
157 |
158 | The :class:`~mongoengine.ReferenceField` object takes a keyword
159 | `reverse_delete_rule` for handling deletion rules if the reference is deleted.
160 | To delete all the posts if a user is deleted set the rule::
161 |
162 | class Post(Document):
163 | title = StringField(max_length=120, required=True)
164 | author = ReferenceField(User, reverse_delete_rule=CASCADE)
165 | tags = ListField(StringField(max_length=30))
166 | comments = ListField(EmbeddedDocumentField(Comment))
167 |
168 | See :class:`~mongoengine.ReferenceField` for more information.
169 |
170 | .. note::
171 | MapFields and DictFields currently don't support automatic handling of
172 | deleted references
173 |
174 |
175 | Adding data to our Tumblelog
176 | ============================
177 | Now that we've defined how our documents will be structured, let's start adding
178 | some documents to the database. Firstly, we'll need to create a :class:`User`
179 | object::
180 |
181 | john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
182 | john.save()
183 |
184 | Note that we could have also defined our user using attribute syntax::
185 |
186 | john = User(email='jdoe@example.com')
187 | john.first_name = 'John'
188 | john.last_name = 'Doe'
189 | john.save()
190 |
191 | Now that we've got our user in the database, let's add a couple of posts::
192 |
193 | post1 = TextPost(title='Fun with MongoEngine', author=john)
194 | post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
195 | post1.tags = ['mongodb', 'mongoengine']
196 | post1.save()
197 |
198 | post2 = LinkPost(title='MongoEngine Documentation', author=john)
199 | post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
200 | post2.tags = ['mongoengine']
201 | post2.save()
202 |
203 | Note that if you change a field on a object that has already been saved, then
204 | call :meth:`save` again, the document will be updated.
205 |
206 | Accessing our data
207 | ==================
208 | So now we've got a couple of posts in our database, how do we display them?
209 | Each document class (i.e. any class that inherits either directly or indirectly
210 | from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is
211 | used to access the documents in the database collection associated with that
212 | class. So let's see how we can get our posts' titles::
213 |
214 | for post in Post.objects:
215 | print post.title
216 |
217 | Retrieving type-specific information
218 | ------------------------------------
219 | This will print the titles of our posts, one on each line. But what if we want
220 | to access the type-specific data (link_url, content, etc.)? One way is simply
221 | to use the :attr:`objects` attribute of a subclass of :class:`Post`::
222 |
223 | for post in TextPost.objects:
224 | print post.content
225 |
226 | Using TextPost's :attr:`objects` attribute only returns documents that were
227 | created using :class:`TextPost`. Actually, there is a more general rule here:
228 | the :attr:`objects` attribute of any subclass of :class:`~mongoengine.Document`
229 | only looks for documents that were created using that subclass or one of its
230 | subclasses.
231 |
232 | So how would we display all of our posts, showing only the information that
233 | corresponds to each post's specific type? There is a better way than just using
234 | each of the subclasses individually. When we used :class:`Post`'s
235 | :attr:`objects` attribute earlier, the objects being returned weren't actually
236 | instances of :class:`Post` --- they were instances of the subclass of
237 | :class:`Post` that matches the post's type. Let's look at how this works in
238 | practice::
239 |
240 | for post in Post.objects:
241 | print post.title
242 | print '=' * len(post.title)
243 |
244 | if isinstance(post, TextPost):
245 | print post.content
246 |
247 | if isinstance(post, LinkPost):
248 | print 'Link:', post.link_url
249 |
250 | print
251 |
252 | This would print the title of each post, followed by the content if it was a
253 | text post, and "Link: " followed by the URL if it was a link post.
254 |
255 | Searching our posts by tag
256 | --------------------------
257 | The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a
258 | :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the
259 | database only when you need the data. It may also be filtered to narrow down
260 | your query. Let's adjust our query so that only posts with the tag "mongodb"
261 | are returned::
262 |
263 | for post in Post.objects(tags='mongodb'):
264 | print post.title
265 |
266 | There are also methods available on :class:`~mongoengine.queryset.QuerySet`
267 | objects that allow different results to be returned, for example, calling
268 | :meth:`first` on the :attr:`objects` attribute will return a single document,
269 | the first matched by the query you provide. Aggregation functions may also be
270 | used on :class:`~mongoengine.queryset.QuerySet` objects::
271 |
272 | num_posts = Post.objects(tags='mongodb').count()
273 | print 'Found %d posts with tag "mongodb"' % num_posts
274 |
275 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Changelog
3 | =========
4 |
5 | Changes in 0.7.X
6 | ================
7 | - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
8 | - Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138)
9 |
10 | Changes in 0.7.5
11 | ================
12 | - ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134)
13 | See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134)
14 |
15 | Changes in 0.7.4
16 | ================
17 | - Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125)
18 |
19 | Changes in 0.7.3
20 | ================
21 | - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119)
22 |
23 | Changes in 0.7.2
24 | ================
25 | - Update index spec generation so its not destructive (MongoEngine/mongoengine#113)
26 |
27 | Changes in 0.7.1
28 | =================
29 | - Fixed index spec inheritance (MongoEngine/mongoengine#111)
30 |
31 | Changes in 0.7.0
32 | =================
33 | - Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107)
34 | - Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104)
35 | - Fixed Q object merge edge case (MongoEngine/mongoengine#109)
36 | - Fixed reloading on sharded documents (hmarr/mongoengine#569)
37 | - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
38 | - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
39 | - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)
40 | - Updated ReferenceField's to optionally store ObjectId strings
41 | this will become the default in 0.8 (MongoEngine/mongoengine#89)
42 | - Added FutureWarning - save will default to `cascade=False` in 0.8
43 | - Added example of indexing embedded document fields (MongoEngine/mongoengine#75)
44 | - Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80)
45 | - Add flexibility for fields handling bad data (MongoEngine/mongoengine#78)
46 | - Embedded Documents no longer handle meta definitions
47 | - Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74)
48 | - Improved queryset filtering (hmarr/mongoengine#554)
49 | - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
50 | - Fixed abstract classes and shard keys (MongoEngine/mongoengine#64)
51 | - Fixed Python 2.5 support
52 | - Added Python 3 support (thanks to Laine Heron)
53 |
54 | Changes in 0.6.20
55 | =================
56 | - Added support for distinct and db_alias (MongoEngine/mongoengine#59)
57 | - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
58 | - Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
59 |
60 | Changes in 0.6.19
61 | =================
62 |
63 | - Added Binary support to UUID (MongoEngine/mongoengine#47)
64 | - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
65 | - Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
66 | - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
67 | - Fixed queryset manager issue (MongoEngine/mongoengine#52)
68 | - Fixed FileField comparison (hmarr/mongoengine#547)
69 |
70 | Changes in 0.6.18
71 | =================
72 | - Fixed recursion loading bug in _get_changed_fields
73 |
74 | Changes in 0.6.17
75 | =================
76 | - Fixed issue with custom queryset manager expecting explicit variable names
77 |
78 | Changes in 0.6.16
79 | =================
80 | - Fixed issue where db_alias wasn't inherited
81 |
82 | Changes in 0.6.15
83 | =================
84 | - Updated validation error messages
85 | - Added support for null / zero / false values in item_frequencies
86 | - Fixed cascade save edge case
87 | - Fixed geo index creation through reference fields
88 | - Added support for args / kwargs when using @queryset_manager
89 | - Deref list custom id fix
90 |
91 | Changes in 0.6.14
92 | =================
93 | - Fixed error dict with nested validation
94 | - Fixed Int/Float fields and not equals None
95 | - Exclude tests from installation
96 | - Allow tuples for index meta
97 | - Fixed use of str in instance checks
98 | - Fixed unicode support in transform update
99 | - Added support for add_to_set and each
100 |
101 | Changes in 0.6.13
102 | =================
103 | - Fixed EmbeddedDocument db_field validation issue
104 | - Fixed StringField unicode issue
105 | - Fixes __repr__ modifying the cursor
106 |
107 | Changes in 0.6.12
108 | =================
109 | - Fixes scalar lookups for primary_key
110 | - Fixes error with _delta handling DBRefs
111 |
112 | Changes in 0.6.11
113 | ==================
114 | - Fixed inconsistency handling None values field attrs
115 | - Fixed map_field embedded db_field issue
116 | - Fixed .save() _delta issue with DbRefs
117 | - Fixed Django TestCase
118 | - Added cmp to Embedded Document
119 | - Added PULL reverse_delete_rule
120 | - Fixed CASCADE delete bug
121 | - Fixed db_field data load error
122 | - Fixed recursive save with FileField
123 |
124 | Changes in 0.6.10
125 | =================
126 | - Fixed basedict / baselist to return super(..)
127 | - Promoted BaseDynamicField to DynamicField
128 |
129 | Changes in 0.6.9
130 | ================
131 | - Fixed sparse indexes on inherited docs
132 | - Removed FileField auto deletion, needs more work maybe 0.7
133 |
134 | Changes in 0.6.8
135 | ================
136 | - Fixed FileField losing reference when no default set
137 | - Removed possible race condition from FileField (grid_file)
138 | - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
139 | - Added support for pull operations on nested EmbeddedDocuments
140 | - Added support for choices with GenericReferenceFields
141 | - Added support for choices with GenericEmbeddedDocumentFields
142 | - Fixed Django 1.4 sessions first save data loss
143 | - FileField now automatically delete files on .delete()
144 | - Fix for GenericReference to_mongo method
145 | - Fixed connection regression
146 | - Updated Django User document, now allows inheritance
147 |
148 | Changes in 0.6.7
149 | ================
150 | - Fixed indexing on '_id' or 'pk' or 'id'
151 | - Invalid data from the DB now raises a InvalidDocumentError
152 | - Cleaned up the Validation Error - docs and code
153 | - Added meta `auto_create_index` so you can disable index creation
154 | - Added write concern options to inserts
155 | - Fixed typo in meta for index options
156 | - Bug fix Read preference now passed correctly
157 | - Added support for File like objects for GridFS
158 | - Fix for #473 - Dereferencing abstracts
159 |
160 | Changes in 0.6.6
161 | ================
162 | - Django 1.4 fixed (finally)
163 | - Added tests for Django
164 |
165 | Changes in 0.6.5
166 | ================
167 | - More Django updates
168 |
169 | Changes in 0.6.4
170 | ================
171 |
172 | - Refactored connection / fixed replicasetconnection
173 | - Bug fix for unknown connection alias error message
174 | - Sessions support Django 1.3 and Django 1.4
175 | - Minor fix for ReferenceField
176 |
177 | Changes in 0.6.3
178 | ================
179 | - Updated sessions for Django 1.4
180 | - Bug fix for updates where listfields contain embedded documents
181 | - Bug fix for collection naming and mixins
182 |
183 | Changes in 0.6.2
184 | ================
185 | - Updated documentation for ReplicaSet connections
186 | - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.
187 |
188 | Changes in 0.6.1
189 | ================
190 | - Fix for replicaSet connections
191 |
192 | Changes in 0.6
193 | ================
194 |
195 | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
196 | - Added support for covered indexes when inheritance is off
197 | - No longer always upsert on save for items with a '_id'
198 | - Error raised if update doesn't have an operation
199 | - DeReferencing is now thread safe
200 | - Errors raised if trying to perform a join in a query
201 | - Updates can now take __raw__ queries
202 | - Added custom 2D index declarations
203 | - Added replicaSet connection support
204 | - Updated deprecated imports from pymongo (safe for pymongo 2.2)
205 | - Added uri support for connections
206 | - Added scalar for efficiently returning partial data values (aliased to values_list)
207 | - Fixed limit skip bug
208 | - Improved Inheritance / Mixin
209 | - Added sharding support
210 | - Added pymongo 2.1 support
211 | - Fixed Abstract documents can now declare indexes
212 | - Added db_alias support to individual documents
213 | - Fixed GridFS documents can now be pickled
214 | - Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
215 | - Added InvalidQueryError when calling with_id with a filter
216 | - Added support for DBRefs in distinct()
217 | - Fixed issue saving False booleans
218 | - Fixed issue with dynamic documents deltas
219 | - Added Reverse Delete Rule support to ListFields - MapFields aren't supported
220 | - Added customisable cascade kwarg options
221 | - Fixed Handle None values for non-required fields
222 | - Removed Document._get_subclasses() - no longer required
223 | - Fixed bug requiring subclasses when not actually needed
224 | - Fixed deletion of dynamic data
225 | - Added support for the $elementMatch operator
226 | - Added reverse option to SortedListFields
227 | - Fixed dereferencing - multi directional list dereferencing
228 | - Fixed issue creating indexes with recursive embedded documents
229 | - Fixed recursive lookup in _unique_with_indexes
230 | - Fixed passing ComplexField defaults to constructor for ReferenceFields
231 | - Fixed validation of DictField Int keys
232 | - Added optional cascade saving
233 | - Fixed dereferencing - max_depth now taken into account
234 | - Fixed document mutation saving issue
235 | - Fixed positional operator when replacing embedded documents
236 | - Added Non-Django Style choices back (you can have either)
237 | - Fixed __repr__ of a sliced queryset
238 | - Added recursive validation error of documents / complex fields
239 | - Fixed breaking during queryset iteration
240 | - Added pre and post bulk-insert signals
241 | - Added ImageField - requires PIL
242 | - Fixed Reference Fields can be None in get_or_create / queries
243 | - Fixed accessing pk on an embedded document
244 | - Fixed calling a queryset after drop_collection now recreates the collection
245 | - Add field name to validation exception messages
246 | - Added UUID field
247 | - Improved efficiency of .get()
248 | - Updated ComplexFields so if required they won't accept empty lists / dicts
249 | - Added spec file for rpm-based distributions
250 | - Fixed ListField so it doesn't accept strings
251 | - Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas
252 |
253 | Changes in v0.5.2
254 | =================
255 |
256 | - A Robust Circular reference bugfix
257 |
258 |
259 | Changes in v0.5.1
260 | =================
261 |
262 | - Fixed simple circular reference bug
263 |
264 | Changes in v0.5
265 | ===============
266 |
267 | - Added InvalidDocumentError - so Document core methods can't be overwritten
268 | - Added GenericEmbeddedDocument - so you can embed any type of embeddable document
269 | - Added within_polygon support - for those with mongodb 1.9
270 | - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
271 | - Added where() - filter to allowing users to specify query expressions as Javascript
272 | - Added SequenceField - for creating sequential counters
273 | - Added update() convenience method to a document
274 | - Added cascading saves - so changes to Referenced documents are saved on .save()
275 | - Added select_related() support
276 | - Added support for the positional operator
277 | - Updated geo index checking to be recursive and check in embedded documents
278 | - Updated default collection naming convention
279 | - Added Document Mixin support
280 | - Fixed queryset __repr__ mid iteration
281 | - Added hint() support, so can tell Mongo the proper index to use for the query
282 | - Fixed issue with inconsistent setting of _cls breaking inherited referencing
283 | - Added help_text and verbose_name to fields to help with some form libs
284 | - Updated item_frequencies to handle embedded document lookups
285 | - Added delta tracking now only sets / unsets explicitly changed fields
286 | - Fixed saving so sets updated values rather than overwrites
287 | - Added ComplexDateTimeField - Handles datetimes correctly with microseconds
288 | - Added ComplexBaseField - for improved flexibility and performance
289 | - Added get_FIELD_display() method for easy choice field displaying
290 | - Added queryset.slave_okay(enabled) method
291 | - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
292 | - Added insert method for bulk inserts
293 | - Added blinker signal support
294 | - Added query_counter context manager for tests
295 | - Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments)
296 | - Added inline_map_reduce option to map_reduce
297 | - Updated connection exception so it provides more info on the cause.
298 | - Added searching multiple levels deep in ``DictField``
299 | - Added ``DictField`` entries containing strings to use matching operators
300 | - Added ``MapField``, similar to ``DictField``
301 | - Added Abstract Base Classes
302 | - Added Custom Objects Managers
303 | - Added sliced subfields updating
304 | - Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
305 | - Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
306 | - Added slicing / subarray fetching controls
307 | - Fixed various unique index and other index issues
308 | - Fixed threaded connection issues
309 | - Added spherical geospatial query operators
310 | - Updated queryset to handle latest version of pymongo
311 | map_reduce now requires an output.
312 | - Added ``Document`` __hash__, __ne__ for pickling
313 | - Added ``FileField`` optional size arg for read method
314 | - Fixed ``FileField`` seek and tell methods for reading files
315 | - Added ``QuerySet.clone`` to support copying querysets
316 | - Fixed item_frequencies when using a name that's the same as a native js function
317 | - Added reverse delete rules
318 | - Fixed issue with unset operation
319 | - Fixed Q-object bug
320 | - Added ``QuerySet.all_fields`` resets previous .only() and .exclude()
321 | - Added ``QuerySet.exclude``
322 | - Added django style choices
323 | - Fixed order and filter issue
324 | - Added ``QuerySet.only`` subfield support
325 | - Added creation_counter to ``BaseField`` allowing fields to be sorted in the
326 | way the user has specified them
327 | - Fixed various errors
328 | - Added many tests
329 |
330 | Changes in v0.4
331 | ===============
332 | - Added ``GridFSStorage`` Django storage backend
333 | - Added ``FileField`` for GridFS support
334 | - New Q-object implementation, which is no longer based on Javascript
335 | - Added ``SortedListField``
336 | - Added ``EmailField``
337 | - Added ``GeoPointField``
338 | - Added ``exact`` and ``iexact`` match operators to ``QuerySet``
339 | - Added ``get_document_or_404`` and ``get_list_or_404`` Django shortcuts
340 | - Added new query operators for Geo queries
341 | - Added ``not`` query operator
342 | - Added new update operators: ``pop`` and ``add_to_set``
343 | - Added ``__raw__`` query parameter
344 | - Added support for custom querysets
345 | - Fixed document inheritance primary key issue
346 | - Added support for querying by array element position
347 | - Base class can now be defined for ``DictField``
348 | - Fixed MRO error that occurred on document inheritance
349 | - Added ``QuerySet.distinct``, ``QuerySet.create``, ``QuerySet.snapshot``,
350 | ``QuerySet.timeout`` and ``QuerySet.all``
351 | - Subsequent calls to ``connect()`` now work
352 | - Introduced ``min_length`` for ``StringField``
353 | - Fixed multi-process connection issue
354 | - Other minor fixes
355 |
356 | Changes in v0.3
357 | ===============
358 | - Added MapReduce support
359 | - Added ``contains``, ``startswith`` and ``endswith`` query operators (and
360 | case-insensitive versions that are prefixed with 'i')
361 | - Deprecated fields' ``name`` parameter, replaced with ``db_field``
362 | - Added ``QuerySet.only`` for only retrieving specific fields
363 | - Added ``QuerySet.in_bulk()`` for bulk querying using ids
364 | - ``QuerySet``\ s now have a ``rewind()`` method, which is called automatically
365 | when the iterator is exhausted, allowing ``QuerySet``\ s to be reused
366 | - Added ``DictField``
367 | - Added ``URLField``
368 | - Added ``DecimalField``
369 | - Added ``BinaryField``
370 | - Added ``GenericReferenceField``
371 | - Added ``get()`` and ``get_or_create()`` methods to ``QuerySet``
372 | - ``ReferenceField``\ s may now reference the document they are defined on
373 | (recursive references) and documents that have not yet been defined
374 | - ``Document`` objects may now be compared for equality (equal if _ids are
375 | equal and documents are of same type)
376 | - ``QuerySet`` update methods now have an ``upsert`` parameter
377 | - Added field name substitution for Javascript code (allows the user to use the
378 | Python names for fields in JS, which are later substituted for the real field
379 | names)
380 | - ``Q`` objects now support regex querying
381 | - Fixed bug where referenced documents within lists weren't properly
382 | dereferenced
383 | - ``ReferenceField``\ s may now be queried using their _id
384 | - Fixed bug where ``EmbeddedDocuments`` couldn't be non-polymorphic
385 | - ``queryset_manager`` functions now accept two arguments -- the document class
386 | as the first and the queryset as the second
387 | - Fixed bug where ``QuerySet.exec_js`` ignored ``Q`` objects
388 | - Other minor fixes
389 |
390 | Changes in v0.2.2
391 | =================
392 | - Fixed bug that prevented indexes from being used on ``ListField``\ s
393 | - ``Document.filter()`` added as an alias to ``Document.__call__()``
394 | - ``validate()`` may now be used on ``EmbeddedDocument``\ s
395 |
396 | Changes in v0.2.1
397 | =================
398 | - Added a MongoEngine backend for Django sessions
399 | - Added ``force_insert`` to ``Document.save()``
400 | - Improved querying syntax for ``ListField`` and ``EmbeddedDocumentField``
401 | - Added support for user-defined primary keys (``_id`` in MongoDB)
402 |
403 | Changes in v0.2
404 | ===============
405 | - Added ``Q`` class for building advanced queries
406 | - Added ``QuerySet`` methods for atomic updates to documents
407 | - Fields may now specify ``unique=True`` to enforce uniqueness across a
408 | collection
409 | - Added option for default document ordering
410 | - Fixed bug in index definitions
411 |
412 | Changes in v0.1.3
413 | =================
414 | - Added Django authentication backend
415 | - Added ``Document.meta`` support for indexes, which are ensured just before
416 | querying takes place
417 | - A few minor bugfixes
418 |
419 |
420 | Changes in v0.1.2
421 | =================
422 | - Query values may be processed before being used in queries
423 | - Made connections lazy
424 | - Fixed bug in Document dictionary-style access
425 | - Added ``BooleanField``
426 | - Added ``Document.reload()`` method
427 |
428 |
429 | Changes in v0.1.1
430 | =================
431 | - Documents may now use capped collections
432 |
--------------------------------------------------------------------------------
/tests/test_dynamic_document.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from mongoengine import *
4 | from mongoengine.connection import get_db
5 |
6 |
class DynamicDocTest(unittest.TestCase):
    """Tests for :class:`DynamicDocument` / :class:`DynamicEmbeddedDocument`.

    Covers arbitrary attribute assignment, dynamic-field deletion, delta
    (set/unset) tracking, querying on dynamic fields, inheritance, and
    interaction with embedded documents.

    NOTE(review): these tests require a MongoDB server reachable on the
    default host/port; they connect to the 'mongoenginetest' database and
    drop collections as they go.
    """

    def setUp(self):
        # Fresh connection and a clean Person collection for every test.
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEqual(p.to_mongo(),
            {"_types": ["Person"], "_cls": "Person",
             "name": "James", "age": 34}
        )

        p.save()

        self.assertEqual(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        # The key must be gone from the raw document too, not just the object.
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        # The same dynamic field holds strings and an int across documents;
        # operators should match only the appropriate type.
        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # Nest one dynamic embedded document inside another's list field.
        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_types": ['Embedded'], "_cls": "Embedded",
                    "string_field": "hello",
                    "int_field": 1,
                    "dict_field": {"hello": "world"},
                    "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, self.Person.objects(age=24).count())

    def test_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting: assigning an empty container results in an $unset
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Testing deltaing works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)# use str as the key so values of otherwise incomparable types can be sorted
        doc.save()
        doc.reload()
        self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta[indexes] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('cat', 'date')
        # NB: there is no index on _types by itself, since
        # the indices on -date and tags will both contain
        # _types as first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)
534 |
--------------------------------------------------------------------------------
/mongoengine/document.py:
--------------------------------------------------------------------------------
1 | import warnings
2 |
3 | import pymongo
4 | import re
5 |
6 | from bson.dbref import DBRef
7 | from mongoengine import signals, queryset
8 |
9 | from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
10 | BaseDict, BaseList)
11 | from queryset import OperationError, NotUniqueError
12 | from connection import get_db, DEFAULT_CONNECTION_NAME
13 |
# Public API of this module; picked up by ``from mongoengine.document import *``.
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
           'DynamicEmbeddedDocument', 'OperationError',
           'InvalidCollectionError', 'NotUniqueError']
17 |
18 |
class InvalidCollectionError(Exception):
    """Raised when an existing collection's options conflict with the
    capped-collection settings requested in a document's ``meta``."""
21 |
22 |
class EmbeddedDocument(BaseDocument):
    """A :class:`~mongoengine.Document` that isn't stored in its own
    collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~mongoengine.Document`\ s through the
    :class:`~mongoengine.EmbeddedDocumentField` field type.

    A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed,
    to create a specialised version of the embedded document that will be
    stored in the same collection. To facilitate this behaviour, `_cls` and
    `_types` fields are added to documents (hidden through the MongoEngine
    interface). To disable this behaviour and remove the dependence on
    the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
    ``False`` in the :attr:`meta` dictionary.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass
    __metaclass__ = DocumentMetaclass

    def __init__(self, *args, **kwargs):
        super(EmbeddedDocument, self).__init__(*args, **kwargs)
        # Start with a clean change-tracking state so deltas only reflect
        # mutations made after construction.
        self._changed_fields = []

    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields.

        Deleting a declared field resets it to the field's default (calling
        the default if it is callable) instead of removing the attribute;
        undeclared (dynamic) attributes are deleted normally.
        """
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
            if callable(default):
                default = default()
            setattr(self, field_name, default)
        else:
            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)

    def __eq__(self, other):
        """Embedded documents are equal when they are of the same class and
        hold the same data."""
        if isinstance(other, self.__class__):
            return self._data == other._data
        return False

    def __ne__(self, other):
        # Python 2 does not derive ``!=`` from ``__eq__``; without this,
        # inequality would fall back to an identity comparison and two
        # equal embedded documents could still compare as "not equal".
        return not self.__eq__(other)
62 |
63 |
64 | class Document(BaseDocument):
65 | """The base class used for defining the structure and properties of
66 | collections of documents stored in MongoDB. Inherit from this class, and
67 | add fields as class attributes to define a document's structure.
68 | Individual documents may then be created by making instances of the
69 | :class:`~mongoengine.Document` subclass.
70 |
71 | By default, the MongoDB collection used to store documents created using a
72 | :class:`~mongoengine.Document` subclass will be the name of the subclass
73 | converted to lowercase. A different collection may be specified by
74 | providing :attr:`collection` to the :attr:`meta` dictionary in the class
75 | definition.
76 |
77 | A :class:`~mongoengine.Document` subclass may be itself subclassed, to
78 | create a specialised version of the document that will be stored in the
79 | same collection. To facilitate this behaviour, `_cls` and `_types`
80 | fields are added to documents (hidden though the MongoEngine interface
81 | though). To disable this behaviour and remove the dependence on the
82 | presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
83 | ``False`` in the :attr:`meta` dictionary.
84 |
85 | A :class:`~mongoengine.Document` may use a **Capped Collection** by
86 | specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
87 | dictionary. :attr:`max_documents` is the maximum number of documents that
88 | is allowed to be stored in the collection, and :attr:`max_size` is the
89 | maximum size of the collection in bytes. If :attr:`max_size` is not
90 | specified and :attr:`max_documents` is, :attr:`max_size` defaults to
91 | 10000000 bytes (10MB).
92 |
93 | Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
94 | dictionary. The value should be a list of field names or tuples of field
95 | names. Index direction may be specified by prefixing the field names with
96 | a **+** or **-** sign.
97 |
98 | Automatic index creation can be disabled by specifying
99 | attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
100 | False then indexes will not be created by MongoEngine. This is useful in
101 | production systems where index creation is performed as part of a deployment
102 | system.
103 |
104 | By default, _types will be added to the start of every index (that
105 | doesn't contain a list) if allow_inheritance is True. This can be
106 | disabled by either setting types to False on the specific index or
107 | by setting index_types to False on the meta dictionary for the document.
108 | """
109 |
110 | # The __metaclass__ attribute is removed by 2to3 when running with Python3
111 | # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
112 | my_metaclass = TopLevelDocumentMetaclass
113 | __metaclass__ = TopLevelDocumentMetaclass
114 |
115 | def pk():
116 | """Primary key alias
117 | """
118 | def fget(self):
119 | return getattr(self, self._meta['id_field'])
120 | def fset(self, value):
121 | return setattr(self, self._meta['id_field'], value)
122 | return property(fget, fset)
123 | pk = pk()
124 |
125 | @classmethod
126 | def _get_db(cls):
127 | """Some Model using other db_alias"""
128 | return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME ))
129 |
130 | @classmethod
131 | def _get_collection(cls):
132 | """Returns the collection for the document."""
133 | if not hasattr(cls, '_collection') or cls._collection is None:
134 | db = cls._get_db()
135 | collection_name = cls._get_collection_name()
136 | # Create collection as a capped collection if specified
137 | if cls._meta['max_size'] or cls._meta['max_documents']:
138 | # Get max document limit and max byte size from meta
139 | max_size = cls._meta['max_size'] or 10000000 # 10MB default
140 | max_documents = cls._meta['max_documents']
141 |
142 | if collection_name in db.collection_names():
143 | cls._collection = db[collection_name]
144 | # The collection already exists, check if its capped
145 | # options match the specified capped options
146 | options = cls._collection.options()
147 | if options.get('max') != max_documents or \
148 | options.get('size') != max_size:
149 | msg = (('Cannot create collection "%s" as a capped '
150 | 'collection as it already exists')
151 | % cls._collection)
152 | raise InvalidCollectionError(msg)
153 | else:
154 | # Create the collection as a capped collection
155 | opts = {'capped': True, 'size': max_size}
156 | if max_documents:
157 | opts['max'] = max_documents
158 | cls._collection = db.create_collection(
159 | collection_name, **opts
160 | )
161 | else:
162 | cls._collection = db[collection_name]
163 | return cls._collection
164 |
    def save(self, safe=True, force_insert=False, validate=True,
             write_options=None, cascade=None, cascade_kwargs=None,
             _refs=None):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

        If ``safe=True`` and the operation is unsuccessful, an
        :class:`~mongoengine.OperationError` will be raised.

        :param safe: check if the operation succeeded before returning
        :param force_insert: only try to create a new document, don't allow
            updates of existing documents
        :param validate: validates the document; set to ``False`` to skip.
        :param write_options: Extra keyword arguments are passed down to
                :meth:`~pymongo.collection.Collection.save` OR
                :meth:`~pymongo.collection.Collection.insert`
                which will be used as options for the resultant
                ``getLastError`` command.  For example,
                ``save(..., write_options={w: 2, fsync: True}, ...)`` will
                wait until at least two servers have recorded the write and
                will force an fsync on the primary server.
        :param cascade: Sets the flag for cascading saves.  You can set a
            default by setting "cascade" in the document __meta__
        :param cascade_kwargs: optional kwargs dictionary to be passed through
            to cascading saves
        :param _refs: A list of processed references used in cascading saves

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using
            set / unset.  Saves are cascaded and any
            :class:`~bson.dbref.DBRef` objects that have changes are
            saved as well.
        .. versionchanged:: 0.6
            Cascade saves are optional = defaults to True, if you want
            fine grain control then you can turn off using document
            meta['cascade'] = False  Also you can pass different kwargs to
            the cascade save using cascade_kwargs which overwrites the
            existing kwargs with custom values
        """
        signals.pre_save.send(self.__class__, document=self)

        if validate:
            self.validate()

        if not write_options:
            write_options = {}

        doc = self.to_mongo()

        # No '_id' in the mongo representation means this document was never
        # persisted; force_insert also routes us down the insert path.
        created = force_insert or '_id' not in doc

        try:
            collection = self.__class__.objects._collection
            if created:
                if force_insert:
                    # insert() raises on duplicate _id instead of replacing
                    object_id = collection.insert(doc, safe=safe,
                                                  **write_options)
                else:
                    object_id = collection.save(doc, safe=safe,
                                                **write_options)
            else:
                # Existing document: only send the changed fields computed
                # by _delta() as $set / $unset operations.
                object_id = doc['_id']
                updates, removals = self._delta()
                # Need to add shard key to query, or you get an error
                select_dict = {'_id': object_id}
                shard_key = self.__class__._meta.get('shard_key', tuple())
                for k in shard_key:
                    # Map the python attribute name to its db field name
                    actual_key = self._db_field_map.get(k, k)
                    select_dict[actual_key] = doc[actual_key]

                # Upsert when the instance was locally marked as freshly
                # created but already carries an _id.
                upsert = self._created
                if updates:
                    collection.update(select_dict, {"$set": updates},
                            upsert=upsert, safe=safe, **write_options)
                if removals:
                    collection.update(select_dict, {"$unset": removals},
                            upsert=upsert, safe=safe, **write_options)

            # Warn only when the caller did not choose and no meta default
            # exists -- cascading will default to off in 0.8.
            warn_cascade = not cascade and 'cascade' not in self._meta
            cascade = (self._meta.get('cascade', True)
                       if cascade is None else cascade)
            if cascade:
                kwargs = {
                    "safe": safe,
                    "force_insert": force_insert,
                    "validate": validate,
                    "write_options": write_options,
                    "cascade": cascade
                }
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs['_refs'] = _refs
                self.cascade_save(warn_cascade=warn_cascade, **kwargs)

        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)'
            # Detect duplicate-key failures by their server error codes so
            # they surface as the more specific NotUniqueError.
            if re.match('^E1100[01] duplicate key', unicode(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u'Tried to save duplicate unique keys (%s)'
                raise NotUniqueError(message % unicode(err))
            raise OperationError(message % unicode(err))
        # Reflect the server-assigned id back onto the instance (skipped for
        # shard-key id fields, which must never be rewritten).
        id_field = self._meta['id_field']
        if id_field not in self._meta.get('shard_key', []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        self._changed_fields = []
        self._created = False
        signals.post_save.send(self.__class__, document=self, created=created)
        return self
276 |
277 | def cascade_save(self, warn_cascade=None, *args, **kwargs):
278 | """Recursively saves any references /
279 | generic references on an objects"""
280 | import fields
281 | _refs = kwargs.get('_refs', []) or []
282 |
283 | for name, cls in self._fields.items():
284 | if not isinstance(cls, (fields.ReferenceField,
285 | fields.GenericReferenceField)):
286 | continue
287 |
288 | ref = getattr(self, name)
289 | if not ref or isinstance(ref, DBRef):
290 | continue
291 |
292 | if not getattr(ref, '_changed_fields', True):
293 | continue
294 |
295 | ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
296 | if ref and ref_id not in _refs:
297 | if warn_cascade:
298 | msg = ("Cascading saves will default to off in 0.8, "
299 | "please explicitly set `.save(cascade=True)`")
300 | warnings.warn(msg, FutureWarning)
301 | _refs.append(ref_id)
302 | kwargs["_refs"] = _refs
303 | ref.save(**kwargs)
304 | ref._changed_fields = []
305 |
306 | @property
307 | def _object_key(self):
308 | """Dict to identify object in collection
309 | """
310 | select_dict = {'pk': self.pk}
311 | shard_key = self.__class__._meta.get('shard_key', tuple())
312 | for k in shard_key:
313 | select_dict[k] = getattr(self, k)
314 | return select_dict
315 |
316 | def update(self, **kwargs):
317 | """Performs an update on the :class:`~mongoengine.Document`
318 | A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
319 |
320 | Raises :class:`OperationError` if called on an object that has not yet
321 | been saved.
322 | """
323 | if not self.pk:
324 | raise OperationError('attempt to update a document not yet saved')
325 |
326 | # Need to add shard key to query, or you get an error
327 | return self.__class__.objects(**self._object_key).update_one(**kwargs)
328 |
    def delete(self, safe=False):
        """Delete the :class:`~mongoengine.Document` from the database. This
        will only take effect if the document has been previously saved.

        :param safe: check if the operation succeeded before returning
        :raises OperationError: if the underlying pymongo delete fails
        """
        signals.pre_delete.send(self.__class__, document=self)

        try:
            # Query via _object_key (pk plus any shard key fields) so the
            # delete is routed correctly on sharded setups.
            self.__class__.objects(**self._object_key).delete(safe=safe)
        except pymongo.errors.OperationFailure, err:
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)

        signals.post_delete.send(self.__class__, document=self)
344 |
345 | def select_related(self, max_depth=1):
346 | """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
347 | a maximum depth in order to cut down the number queries to mongodb.
348 |
349 | .. versionadded:: 0.5
350 | """
351 | import dereference
352 | self._data = dereference.DeReference()(self._data, max_depth)
353 | return self
354 |
    def reload(self, max_depth=1):
        """Reloads all attributes from the database.

        :param max_depth: maximum depth of references to dereference on the
            freshly loaded document
        :raises OperationError: if the document no longer exists in the
            database

        .. versionadded:: 0.1.2
        .. versionchanged:: 0.6  Now chainable

        .. NOTE(review): this updates ``self`` in place but returns the
           freshly fetched ``obj`` (a distinct instance), so the chained
           result is not ``self`` -- confirm that callers expect this.
        """
        id_field = self._meta['id_field']
        # Fetch a fresh copy by primary key, dereferencing up to max_depth.
        obj = self.__class__.objects(
                **{id_field: self[id_field]}
              ).limit(1).select_related(max_depth=max_depth)
        if obj:
            obj = obj[0]
        else:
            msg = "Reloaded document has been deleted"
            raise OperationError(msg)
        # Copy every declared field back onto self, re-linking change
        # tracking via _reload.
        for field in self._fields:
            setattr(self, field, self._reload(field, obj[field]))
        if self._dynamic:
            # Dynamic fields live in _data rather than _fields.
            for name in self._dynamic_fields.keys():
                setattr(self, name, self._reload(name, obj._data[name]))
        self._changed_fields = obj._changed_fields
        return obj
377 |
378 | def _reload(self, key, value):
379 | """Used by :meth:`~mongoengine.Document.reload` to ensure the
380 | correct instance is linked to self.
381 | """
382 | if isinstance(value, BaseDict):
383 | value = [(k, self._reload(k, v)) for k, v in value.items()]
384 | value = BaseDict(value, self, key)
385 | elif isinstance(value, BaseList):
386 | value = [self._reload(key, v) for v in value]
387 | value = BaseList(value, self, key)
388 | elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
389 | value._changed_fields = []
390 | return value
391 |
392 | def to_dbref(self):
393 | """Returns an instance of :class:`~bson.dbref.DBRef` useful in
394 | `__raw__` queries."""
395 | if not self.pk:
396 | msg = "Only saved documents can have a valid dbref"
397 | raise OperationError(msg)
398 | return DBRef(self.__class__._get_collection_name(), self.pk)
399 |
400 | @classmethod
401 | def register_delete_rule(cls, document_cls, field_name, rule):
402 | """This method registers the delete rules to apply when removing this
403 | object.
404 | """
405 | delete_rules = cls._meta.get('delete_rules') or {}
406 | delete_rules[(document_cls, field_name)] = rule
407 | cls._meta['delete_rules'] = delete_rules
408 |
409 | @classmethod
410 | def drop_collection(cls):
411 | """Drops the entire collection associated with this
412 | :class:`~mongoengine.Document` type from the database.
413 | """
414 | db = cls._get_db()
415 | db.drop_collection(cls._get_collection_name())
416 | queryset.QuerySet._reset_already_indexed(cls)
417 |
418 |
class DynamicDocument(Document):
    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expando style properties.  Any data
    passed or set against the :class:`~mongoengine.DynamicDocument` that is
    not a field is automatically converted into a
    :class:`~mongoengine.DynamicField` and data can be attributed to that
    field.

    .. note::

        There is one caveat on Dynamic Documents: fields cannot start with `_`
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass
    __metaclass__ = TopLevelDocumentMetaclass

    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Delete an attribute.  Dynamic fields are set to None so that
        _delta can emit an $unset for them; declared fields fall back to
        the standard behaviour."""
        field_name = args[0]
        if field_name not in self._dynamic_fields:
            super(DynamicDocument, self).__delattr__(*args, **kwargs)
        else:
            setattr(self, field_name, None)
448 |
449 |
class DynamicEmbeddedDocument(EmbeddedDocument):
    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas.  See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass
    __metaclass__ = DocumentMetaclass

    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Delete an attribute by setting it to None, which lets _delta
        emit an $unset for the field."""
        setattr(self, args[0], None)
468 |
469 |
class MapReduceDocument(object):
    """A document returned from a map/reduce query.

    :param document: the :class:`~mongoengine.Document` subclass the
        results relate to; used by :attr:`object` to lazy-load the
        document identified by ``key``
    :param collection: An instance of :class:`~pymongo.Collection`
    :param key: Document/result key, often an instance of
                :class:`~bson.objectid.ObjectId`. If supplied as
                an ``ObjectId`` found in the given ``collection``,
                the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.

    .. versionadded:: 0.3
    """

    def __init__(self, document, collection, key, value):
        self._document = document
        self._collection = collection
        self.key = key
        self.value = value

    @property
    def object(self):
        """Lazy-load the object referenced by ``self.key``. ``self.key``
        should be the ``primary_key``.
        """
        # NOTE(review): _meta['id_field'] is the id field's *name*, so
        # id_field_type is the name's type -- confirm this coercion is the
        # intended behaviour.
        id_field = self._document()._meta['id_field']
        id_field_type = type(id_field)

        if not isinstance(self.key, id_field_type):
            try:
                self.key = id_field_type(self.key)
            except Exception:
                # Narrowed from a bare except so SystemExit/KeyboardInterrupt
                # are not swallowed.
                raise Exception("Could not cast key as %s" %
                                id_field_type.__name__)

        # Cache the looked-up document; removed the duplicated (unreachable)
        # return statement that followed this block.
        if not hasattr(self, "_key_object"):
            self._key_object = self._document.objects.with_id(self.key)
        return self._key_object
508 |
--------------------------------------------------------------------------------