├── .circleci └── config.yml ├── .gitignore ├── .travis.yml ├── AUTHORS ├── CONTRIBUTING.rst ├── DIFFERENCES.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── README_MONGOENGINE.rst ├── benchmark.py ├── docs ├── Makefile ├── _themes │ └── nature │ │ ├── static │ │ ├── nature.css_t │ │ └── pygments.css │ │ └── theme.conf ├── apireference.rst ├── changelog.rst ├── code │ └── tumblelog.py ├── conf.py ├── guide │ ├── connecting.rst │ ├── defining-documents.rst │ ├── document-instances.rst │ ├── gridfs.rst │ ├── index.rst │ ├── installing.rst │ ├── querying.rst │ └── signals.rst ├── index.rst ├── tutorial.rst └── upgrade.rst ├── mongoengine ├── __init__.py ├── base │ ├── __init__.py │ ├── common.py │ ├── datastructures.py │ ├── document.py │ ├── fields.py │ ├── metaclasses.py │ └── proxy.py ├── common.py ├── connection.py ├── context_managers.py ├── dereference.py ├── document.py ├── errors.py ├── fields.py ├── pymongo_support.py ├── python_support.py ├── queryset │ ├── __init__.py │ ├── field_list.py │ ├── manager.py │ ├── queryset.py │ ├── transform.py │ └── visitor.py └── signals.py ├── python-mongoengine.spec ├── requirements.txt ├── setup.cfg ├── setup.py ├── test-requirements.txt └── tests ├── __init__.py ├── document ├── __init__.py ├── test_class_methods.py ├── test_delta.py ├── test_dynamic.py ├── test_indexes.py ├── test_inheritance.py ├── test_instance.py ├── test_json_serialisation.py └── test_validation.py ├── fields ├── __init__.py ├── mongodb_leaf.png ├── mongoengine.png ├── test_fields.py ├── test_file.py └── test_geo.py ├── fixtures.py ├── queryset ├── __init__.py ├── test_field_list.py ├── test_geo.py ├── test_queryset.py ├── test_transform.py └── test_visitor.py ├── test_all_warnings.py ├── test_benchmark.py ├── test_connection.py ├── test_context_managers.py ├── test_dereference.py └── test_signals.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | workflows: 4 | version: 2 5 | workflow: 6 | jobs: 7 | - test-3.10 8 | - test-3.11 9 | - test-3.11-mongo6 10 | 11 | defaults: &defaults 12 | working_directory: ~/code 13 | steps: 14 | - checkout 15 | - run: 16 | name: Install dependencies 17 | command: pip install --user -r test-requirements.txt 18 | - run: 19 | name: Test 20 | command: pytest tests/ 21 | 22 | jobs: 23 | test-3.10: 24 | <<: *defaults 25 | docker: 26 | - image: circleci/python:3.10 27 | - image: mongo:5.0.23 28 | test-3.11: 29 | <<: *defaults 30 | docker: 31 | - image: cimg/python:3.11 32 | - image: mongo:5.0.23 33 | test-3.11-mongo6: 34 | <<: *defaults 35 | docker: 36 | - image: cimg/python:3.11 37 | - image: mongo:6.0.19 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | !.gitignore 3 | !.circleci 4 | *~ 5 | *.py[co] 6 | .*.sw[po] 7 | *.egg 8 | docs/.build 9 | docs/_build 10 | build/ 11 | dist/ 12 | mongoengine.egg-info/ 13 | env/ 14 | venv/ 15 | .settings 16 | .project 17 | .pydevproject 18 | tests/test_bugfix.py 19 | htmlcov/ 20 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # http://travis-ci.org/#!/MongoEngine/mongoengine 2 | language: python 3 | python: 4 | - "2.7" 5 | env: 6 | - PYMONGO=2.7 7 | - PYMONGO=2.8 8 | before_install: 9 | - travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 
7F0CEB10 10 | - echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | 11 | sudo tee /etc/apt/sources.list.d/mongodb.list 12 | - travis_retry sudo apt-get update 13 | - travis_retry sudo apt-get install mongodb-org-server 14 | install: 15 | - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi 16 | - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi 17 | - python setup.py install 18 | script: 19 | - python setup.py test 20 | branches: 21 | only: 22 | - master 23 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | The PRIMARY AUTHORS are (and/or have been): 2 | 3 | Ross Lawley 4 | Harry Marr 5 | Matt Dennewitz 6 | Deepak Thukral 7 | Florian Schlachter 8 | Steve Challis 9 | Wilson Júnior 10 | Dan Crosta https://github.com/dcrosta 11 | Laine Herron https://github.com/LaineHerron 12 | Thomas Steinacher http://thomasst.ch/ 13 | 14 | CONTRIBUTORS 15 | 16 | Dervived from the git logs, inevitably incomplete but all of whom and others 17 | have submitted patches, reported bugs and generally helped make MongoEngine 18 | that much better: 19 | 20 | * Harry Marr 21 | * Ross Lawley 22 | * blackbrrr 23 | * Florian Schlachter 24 | * Vincent Driessen 25 | * Steve Challis 26 | * flosch 27 | * Deepak Thukral 28 | * Colin Howe 29 | * Wilson Júnior (https://github.com/wpjunior) 30 | * Alistair Roche 31 | * Dan Crosta 32 | * Viktor Kerkez 33 | * Stephan Jaekel 34 | * Rached Ben Mustapha 35 | * Greg Turner 36 | * Daniel Hasselrot 37 | * Mircea Pasoi 38 | * Matt Chisholm 39 | * James Punteney 40 | * TimothéePeignier 41 | * Stuart Rackham 42 | * Serge Matveenko 43 | * Matt Dennewitz 44 | * Don Spaulding 45 | * Ales Zoulek 46 | * sshwsfc 47 | * sib 48 | * Samuel Clay 49 | * Nick Vlku 50 | * martin 51 | * Flavio Amieiro 52 | * Анхбаяр Лхагвадорж 53 | * Zak Johnson 54 | * Victor Farazdagi 55 | * vandersonmota 56 | * Theo Julienne 57 | * sp 58 | * Slavi Pantaleev 59 | * Richard Henry 60 | * Nicolas Perriault 61 | * Nick Vlku Jr 62 | * Michael Henson 63 | * Leo Honkanen 64 | * kuno 65 | * Josh Ourisman 66 | * Jaime 67 | * Igor Ivanov 68 | * Gregg Lind 69 | * Gareth Lloyd 70 | * Albert Choi 71 | * John Arnfield 72 | * grubberr 73 | * Paul Aliagas 74 | * Paul Cunnane 75 | * Julien Rebetez 76 | * Marc Tamlyn 77 | * Karim Allah 78 | * Adam Parrish 79 | * jpfarias 80 | * jonrscott 81 | * Alice Zoë Bevan-McGregor (https://github.com/amcgregor/) 82 | * Stephen Young 83 | * tkloc 84 | * aid 85 | * yamaneko1212 86 | * dave mankoff 87 | * Alexander G. 
Morano 88 | * jwilder 89 | * Joe Shaw 90 | * Adam Flynn 91 | * Ankhbayar 92 | * Jan Schrewe 93 | * David Koblas 94 | * Crittercism 95 | * Alvin Liang 96 | * andrewmlevy 97 | * Chris Faulkner 98 | * Ashwin Purohit 99 | * Shalabh Aggarwal 100 | * Chris Williams 101 | * Robert Kajic 102 | * Jacob Peddicord 103 | * Nils Hasenbanck 104 | * mostlystatic 105 | * Greg Banks 106 | * swashbuckler 107 | * Adam Reeve 108 | * Anthony Nemitz 109 | * deignacio 110 | * Shaun Duncan 111 | * Meir Kriheli 112 | * Andrey Fedoseev 113 | * aparajita 114 | * Tristan Escalada 115 | * Alexander Koshelev 116 | * Jaime Irurzun 117 | * Alexandre González 118 | * Tommi Komulainen 119 | * Peter Landry 120 | * biszkoptwielki 121 | * Anton Kolechkin 122 | * Sergey Nikitin 123 | * psychogenic 124 | * Stefan Wójcik 125 | * dimonb 126 | * Garry Polley 127 | * James Slagle 128 | * Adrian Scott 129 | * Peter Teichman 130 | * Jakub Kot 131 | * Jorge Bastida 132 | * Aleksandr Sorokoumov 133 | * Yohan Graterol 134 | * bool-dev 135 | * Russ Weeks 136 | * Paul Swartz 137 | * Sundar Raman 138 | * Benoit Louy 139 | * lraucy 140 | * hellysmile 141 | * Jaepil Jeong 142 | * Daniil Sharou 143 | * Stefan Wójcik 144 | * Pete Campton 145 | * Martyn Smith 146 | * Marcelo Anton 147 | * Aleksey Porfirov 148 | * Nicolas Trippar 149 | * Manuel Hermann 150 | * Gustavo Gawryszewski 151 | * Max Countryman 152 | * caitifbrito 153 | * lcya86 刘春洋 154 | * Martin Alderete (https://github.com/malderete) 155 | * Nick Joyce 156 | * Jared Forsyth 157 | * Kenneth Falck 158 | * Lukasz Balcerzak 159 | * Nicolas Cortot 160 | * Alex (https://github.com/kelsta) 161 | * Jin Zhang 162 | * Daniel Axtens 163 | * Leo-Naeka 164 | * Ryan Witt (https://github.com/ryanwitt) 165 | * Jiequan (https://github.com/Jiequan) 166 | * hensom (https://github.com/hensom) 167 | * zhy0216 (https://github.com/zhy0216) 168 | * istinspring (https://github.com/istinspring) 169 | * Massimo Santini (https://github.com/mapio) 170 | * Nigel McNie (https://github.com/nigelmcnie) 171 | * ygbourhis (https://github.com/ygbourhis) 172 | * Bob Dickinson (https://github.com/BobDickinson) 173 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing to MongoEngine 2 | =========================== 3 | 4 | MongoEngine has a large `community 5 | `_ and 6 | contributions are always encouraged. Contributions can be as simple as 7 | minor tweaks to the documentation. Please read these guidelines before 8 | sending a pull request. 9 | 10 | Bugfixes and New Features 11 | ------------------------- 12 | 13 | Before starting to write code, look for existing `tickets 14 | `_ or `create one 15 | `_ for your specific 16 | issue or feature request. That way you avoid working on something 17 | that might not be of interest or that has already been addressed. If in doubt 18 | post to the `user group ` 19 | 20 | Supported Interpreters 21 | ---------------------- 22 | 23 | MongoEngine supports CPython 2.6 and newer. Language 24 | features not supported by all interpreters can not be used. 25 | Please also ensure that your code is properly converted by 26 | `2to3 `_ for Python 3 support. 27 | 28 | Style Guide 29 | ----------- 30 | 31 | MongoEngine aims to follow `PEP8 `_ 32 | including 4 space indents and 79 character line limits. 33 | 34 | Testing 35 | ------- 36 | 37 | All tests are run on `Travis `_ 38 | and any pull requests are automatically tested by Travis. 
Any pull requests 39 | without tests will take longer to be integrated and might be refused. 40 | 41 | General Guidelines 42 | ------------------ 43 | 44 | - Avoid backward-breaking changes if at all possible. 45 | - Write inline documentation for new classes and methods. 46 | - Write tests and make sure they pass (make sure you have a mongod 47 | running on the default port, then execute ``python setup.py test`` 48 | from the command line to run the test suite). 49 | - Add yourself to AUTHORS :) 50 | 51 | Documentation 52 | ------------- 53 | 54 | To contribute to the `API documentation 55 | `_ 56 | just make your changes to the inline documentation of the appropriate 57 | `source code `_ or `rst file 58 | `_ in a 59 | branch and submit a `pull request `_. 60 | You might also use the GitHub `Edit `_ 61 | button. 62 | -------------------------------------------------------------------------------- /DIFFERENCES.md: -------------------------------------------------------------------------------- 1 | Differences between MongoMallard and MongoEngine 2 | ----- 3 | 4 | * All document fields are lazily evaluated, resulting in much faster object initialization time. 5 | * `_data` is removed due to lazy evaluation. `to_dict()` can be used to convert a document to a dictionary, and `_internal_data` contains previously evaluated data. 6 | * Field methods `to_python`, `from_python`, `to_mongo`, `value_for_instance`: 7 | * `to_python` is called only when converting from a MongoDB type to the document's Python type. 8 | * `from_python` is called when converting a value assigned in Python to the document's Python type. 9 | * `to_mongo` is called when converting from a document Python type to a MongoDB type. 10 | * `value_for_instance` is called just before a value is returned in Python, allowing for instance-specific transformations. 11 | * The `pre_init`, `post_init`, and `pre_save_post_validation` signals are removed to keep object initialization fast. 12 | * `DecimalField` is removed since there is no corresponding MongoDB type. 13 | * `LongField` is removed since it is equivalent to `IntField`. 14 | * Added `SafeReferenceField`, which returns `None` if the referenced document does not exist. 15 | * Added `SafeReferenceListField`, which omits references whose documents no longer exist. 16 | * Accessing a `ListField(ReferenceField)` doesn't automatically dereference all objects since they are lazily evaluated. A `SafeReferenceListField` may be used instead. 17 | * Accessing a related object's id doesn't fetch the object from the database: e.g. `book.author.id`, where `author` is a `ReferenceField`, will not make a database lookup unless a `SafeReferenceField` is used. When inheritance is allowed, a proxy object is returned; otherwise a lazy object of the referenced document class is returned. 18 | * The primary key is only stored as `_id` in the database and is referenced in Python as `pk` or as the name of the primary key field. 19 | * Saves are not cascaded by default. 20 | * `Document.save()` supports a `full=True` keyword argument to force saving all model fields. 21 | * `_get_changed_fields()` / `_changed_fields` returns a set of field names (not db field names). 22 | * Simplified the `EmailField` email regex to be more compatible. 23 | * Assigning invalid types (e.g. 
an invalid string to `IntField`) immediately raises a `ValueError`. 24 | * `order_by()` without an argument resets the ordering (no ordering will be applied). 25 | 26 | Untested / not implemented yet: 27 | ----- 28 | 29 | * Dynamic documents / `DynamicField`, dynamic addition/deletion of fields 30 | * Field display name methods 31 | * `SequenceField` 32 | * Pickling documents 33 | * `FileField` 34 | * All Geo fields 35 | * `no_dereference()` 36 | * Using `SafeReferenceListField` with `GenericReferenceField` 37 | * `max_depth` argument for `doc.reload()` 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009 See AUTHORS 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include MANIFEST.in 2 | include README.rst 3 | include LICENSE 4 | include AUTHORS 5 | recursive-include docs * 6 | prune docs/_build 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | MongoMallard 2 | ============ 3 | 4 | MongoMallard is a fast ORM-like layer on top of PyMongo, based on MongoEngine. 5 | 6 | * Repository: https://github.com/elasticsales/mongoengine 7 | * See [README_MONGOENGINE](https://github.com/elasticsales/mongoengine/blob/master/README_MONGOENGINE.rst) for MongoEngine's README. 8 | * See [DIFFERENCES](https://github.com/elasticsales/mongoengine/blob/master/DIFFERENCES.md) for differences between MongoEngine and MongoMallard. 9 | 10 | 11 | Benchmarks 12 | ---------- 13 | 14 | Sample run on a 2.7 GHz Intel Core i5 running OS X 10.8.3 15 |
| Benchmark | MongoEngine 0.8.2 (ede9fcf) | MongoMallard (478062c) | Speedup |
| --- | --- | --- | --- |
| Doc initialization | 52.494us | 25.195us | 2.08x |
| Doc getattr | 1.339us | 0.584us | 2.29x |
| Doc setattr | 3.064us | 2.550us | 1.20x |
| Doc to mongo | 49.415us | 26.497us | 1.86x |
| Load from SON | 61.475us | 4.510us | 13.63x |
| Save to database | 434.389us | 289.972us | 2.29x |
| Load from database | 558.178us | 480.690us | 1.16x |
| Save/delete big object to database | 98.838ms | 65.789ms | 1.50x |
| Serialize big object from database | 31.390ms | 20.265ms | 1.55x |
| Load big object from database | 41.159ms | 1.400ms | 29.40x |
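The numbers above come from `timeit`-driven micro-benchmarks. As a rough illustration of how such a figure can be reproduced (a minimal sketch modelled on `benchmark.py` in this repository, not the exact harness that produced the table; the iteration count and the `Noddy` document are illustrative), the "Doc initialization" row corresponds to timing bare document construction:

```python
import timeit

# Setup mirrors the Noddy document used by benchmark.py; assumes a local
# mongod on the default port and this package importable as `mongoengine`.
setup = """
from mongoengine import Document, DictField, connect
connect('timeit_test')

class Noddy(Document):
    fields = DictField()
"""

# Time bare document construction ("Doc initialization").
iterations = 100000
timer = timeit.Timer(stmt="Noddy()", setup=setup)
total = timer.timeit(iterations)
print("Doc initialization: %.3fus per call" % (total / iterations * 1e6))
```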
84 | 85 | See [tests/benchmark.py](https://github.com/elasticsales/mongoengine/blob/master/tests/benchmark.py) for source code. 86 | -------------------------------------------------------------------------------- /README_MONGOENGINE.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | MongoEngine 3 | =========== 4 | :Info: MongoEngine is an ORM-like layer on top of PyMongo. 5 | :Repository: https://github.com/MongoEngine/mongoengine 6 | :Author: Harry Marr (http://github.com/hmarr) 7 | :Maintainer: Ross Lawley (http://github.com/rozza) 8 | 9 | .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master 10 | :target: http://travis-ci.org/MongoEngine/mongoengine 11 | 12 | About 13 | ===== 14 | MongoEngine is a Python Object-Document Mapper for working with MongoDB. 15 | Documentation available at http://mongoengine-odm.rtfd.org - there is currently 16 | a `tutorial `_, a `user guide 17 | `_ and an `API reference 18 | `_. 19 | 20 | Installation 21 | ============ 22 | If you have `setuptools `_ 23 | you can use ``easy_install -U mongoengine``. Otherwise, you can download the 24 | source from `GitHub `_ and run ``python 25 | setup.py install``. 26 | 27 | Dependencies 28 | ============ 29 | - pymongo 2.5+ 30 | - sphinx (optional - for documentation generation) 31 | 32 | Examples 33 | ======== 34 | Some simple examples of what MongoEngine code looks like:: 35 | 36 | class BlogPost(Document): 37 | title = StringField(required=True, max_length=200) 38 | posted = DateTimeField(default=datetime.datetime.now) 39 | tags = ListField(StringField(max_length=50)) 40 | 41 | class TextPost(BlogPost): 42 | content = StringField(required=True) 43 | 44 | class LinkPost(BlogPost): 45 | url = StringField(required=True) 46 | 47 | # Create a text-based post 48 | >>> post1 = TextPost(title='Using MongoEngine', content='See the tutorial') 49 | >>> post1.tags = ['mongodb', 'mongoengine'] 50 | >>> post1.save() 51 | 52 | # Create a link-based post 53 | >>> post2 = LinkPost(title='MongoEngine Docs', url='hmarr.com/mongoengine') 54 | >>> post2.tags = ['mongoengine', 'documentation'] 55 | >>> post2.save() 56 | 57 | # Iterate over all posts using the BlogPost superclass 58 | >>> for post in BlogPost.objects: 59 | ... print '===', post.title, '===' 60 | ... if isinstance(post, TextPost): 61 | ... print post.content 62 | ... elif isinstance(post, LinkPost): 63 | ... print 'Link:', post.url 64 | ... print 65 | ... 66 | 67 | >>> len(BlogPost.objects) 68 | 2 69 | >>> len(HtmlPost.objects) 70 | 1 71 | >>> len(LinkPost.objects) 72 | 1 73 | 74 | # Find tagged posts 75 | >>> len(BlogPost.objects(tags='mongoengine')) 76 | 2 77 | >>> len(BlogPost.objects(tags='mongodb')) 78 | 1 79 | 80 | Tests 81 | ===== 82 | To run the test suite, ensure you are running a local instance of MongoDB on 83 | the standard port, and run: ``python setup.py test``. 84 | 85 | Community 86 | ========= 87 | - `MongoEngine Users mailing list 88 | `_ 89 | - `MongoEngine Developers mailing list 90 | `_ 91 | - `#mongoengine IRC channel `_ 92 | 93 | Contributing 94 | ============ 95 | We welcome contributions! 
see the`Contribution guidelines `_ 96 | -------------------------------------------------------------------------------- /benchmark.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import timeit 4 | 5 | 6 | def cprofile_main(): 7 | from pymongo import Connection 8 | connection = Connection() 9 | connection.drop_database('timeit_test') 10 | connection.close() 11 | 12 | from mongoengine import Document, DictField, connect 13 | connect("timeit_test") 14 | 15 | class Noddy(Document): 16 | fields = DictField() 17 | 18 | for i in range(1): 19 | noddy = Noddy() 20 | for j in range(20): 21 | noddy.fields["key" + str(j)] = "value " + str(j) 22 | noddy.save() 23 | 24 | 25 | def main(): 26 | """ 27 | 0.4 Performance Figures ... 28 | 29 | ---------------------------------------------------------------------------------------------------- 30 | Creating 10000 dictionaries - Pymongo 31 | 3.86744189262 32 | ---------------------------------------------------------------------------------------------------- 33 | Creating 10000 dictionaries - MongoEngine 34 | 6.23374891281 35 | ---------------------------------------------------------------------------------------------------- 36 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 37 | 5.33027005196 38 | ---------------------------------------------------------------------------------------------------- 39 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 40 | pass - No Cascade 41 | 42 | 0.5.X 43 | ---------------------------------------------------------------------------------------------------- 44 | Creating 10000 dictionaries - Pymongo 45 | 3.89597702026 46 | ---------------------------------------------------------------------------------------------------- 47 | Creating 10000 dictionaries - MongoEngine 48 | 21.7735359669 49 | ---------------------------------------------------------------------------------------------------- 50 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 51 | 19.8670389652 52 | ---------------------------------------------------------------------------------------------------- 53 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 54 | pass - No Cascade 55 | 56 | 0.6.X 57 | ---------------------------------------------------------------------------------------------------- 58 | Creating 10000 dictionaries - Pymongo 59 | 3.81559205055 60 | ---------------------------------------------------------------------------------------------------- 61 | Creating 10000 dictionaries - MongoEngine 62 | 10.0446798801 63 | ---------------------------------------------------------------------------------------------------- 64 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 65 | 9.51354718208 66 | ---------------------------------------------------------------------------------------------------- 67 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 68 | 9.02567505836 69 | ---------------------------------------------------------------------------------------------------- 70 | Creating 10000 dictionaries - MongoEngine, force=True 71 | 8.44933390617 72 | 73 | 0.7.X 74 | ---------------------------------------------------------------------------------------------------- 75 | Creating 10000 dictionaries - Pymongo 76 | 3.78801012039 77 | 
---------------------------------------------------------------------------------------------------- 78 | Creating 10000 dictionaries - MongoEngine 79 | 9.73050498962 80 | ---------------------------------------------------------------------------------------------------- 81 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 82 | 8.33456707001 83 | ---------------------------------------------------------------------------------------------------- 84 | Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 85 | 8.37778115273 86 | ---------------------------------------------------------------------------------------------------- 87 | Creating 10000 dictionaries - MongoEngine, force=True 88 | 8.36906409264 89 | 0.8.X 90 | ---------------------------------------------------------------------------------------------------- 91 | Creating 10000 dictionaries - Pymongo 92 | 3.69964408875 93 | ---------------------------------------------------------------------------------------------------- 94 | Creating 10000 dictionaries - Pymongo write_concern={"w": 0} 95 | 3.5526599884 96 | ---------------------------------------------------------------------------------------------------- 97 | Creating 10000 dictionaries - MongoEngine 98 | 7.00959801674 99 | ---------------------------------------------------------------------------------------------------- 100 | Creating 10000 dictionaries without continual assign - MongoEngine 101 | 5.60943293571 102 | ---------------------------------------------------------------------------------------------------- 103 | Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True 104 | 6.715102911 105 | ---------------------------------------------------------------------------------------------------- 106 | Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True 107 | 5.50644683838 108 | ---------------------------------------------------------------------------------------------------- 109 | Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False 110 | 4.69851183891 111 | ---------------------------------------------------------------------------------------------------- 112 | Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False 113 | 4.68946313858 114 | ---------------------------------------------------------------------------------------------------- 115 | """ 116 | 117 | setup = """ 118 | from pymongo import MongoClient 119 | connection = MongoClient() 120 | connection.drop_database('timeit_test') 121 | """ 122 | 123 | stmt = """ 124 | from pymongo import MongoClient 125 | connection = MongoClient() 126 | 127 | db = connection.timeit_test 128 | noddy = db.noddy 129 | 130 | for i in xrange(10000): 131 | example = {'fields': {}} 132 | for j in range(20): 133 | example['fields']["key"+str(j)] = "value "+str(j) 134 | 135 | noddy.save(example) 136 | 137 | myNoddys = noddy.find() 138 | [n for n in myNoddys] # iterate 139 | """ 140 | 141 | print("-" * 100) 142 | print("""Creating 10000 dictionaries - Pymongo""") 143 | t = timeit.Timer(stmt=stmt, setup=setup) 144 | print(t.timeit(1)) 145 | 146 | stmt = """ 147 | from pymongo import MongoClient 148 | connection = MongoClient() 149 | 150 | db = connection.timeit_test 151 | noddy = db.noddy 152 | 153 | for i in xrange(10000): 154 | example = {'fields': {}} 155 | for j in range(20): 156 | example['fields']["key"+str(j)] = "value "+str(j) 157 
| 158 | noddy.save(example, write_concern={"w": 0}) 159 | 160 | myNoddys = noddy.find() 161 | [n for n in myNoddys] # iterate 162 | """ 163 | 164 | print("-" * 100) 165 | print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") 166 | t = timeit.Timer(stmt=stmt, setup=setup) 167 | print(t.timeit(1)) 168 | 169 | setup = """ 170 | from pymongo import MongoClient 171 | connection = MongoClient() 172 | connection.drop_database('timeit_test') 173 | connection.close() 174 | 175 | from mongoengine import Document, DictField, connect 176 | connect("timeit_test") 177 | 178 | class Noddy(Document): 179 | fields = DictField() 180 | """ 181 | 182 | stmt = """ 183 | for i in xrange(10000): 184 | noddy = Noddy() 185 | for j in range(20): 186 | noddy.fields["key"+str(j)] = "value "+str(j) 187 | noddy.save() 188 | 189 | myNoddys = Noddy.objects() 190 | [n for n in myNoddys] # iterate 191 | """ 192 | 193 | print("-" * 100) 194 | print("""Creating 10000 dictionaries - MongoEngine""") 195 | t = timeit.Timer(stmt=stmt, setup=setup) 196 | print(t.timeit(1)) 197 | 198 | stmt = """ 199 | for i in xrange(10000): 200 | noddy = Noddy() 201 | fields = {} 202 | for j in range(20): 203 | fields["key"+str(j)] = "value "+str(j) 204 | noddy.fields = fields 205 | noddy.save() 206 | 207 | myNoddys = Noddy.objects() 208 | [n for n in myNoddys] # iterate 209 | """ 210 | 211 | print("-" * 100) 212 | print("""Creating 10000 dictionaries without continual assign - MongoEngine""") 213 | t = timeit.Timer(stmt=stmt, setup=setup) 214 | print(t.timeit(1)) 215 | 216 | stmt = """ 217 | for i in xrange(10000): 218 | noddy = Noddy() 219 | for j in range(20): 220 | noddy.fields["key"+str(j)] = "value "+str(j) 221 | noddy.save(write_concern={"w": 0}, cascade=True) 222 | 223 | myNoddys = Noddy.objects() 224 | [n for n in myNoddys] # iterate 225 | """ 226 | 227 | print("-" * 100) 228 | print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") 229 | t = timeit.Timer(stmt=stmt, setup=setup) 230 | print(t.timeit(1)) 231 | 232 | stmt = """ 233 | for i in xrange(10000): 234 | noddy = Noddy() 235 | for j in range(20): 236 | noddy.fields["key"+str(j)] = "value "+str(j) 237 | noddy.save(write_concern={"w": 0}, validate=False, cascade=True) 238 | 239 | myNoddys = Noddy.objects() 240 | [n for n in myNoddys] # iterate 241 | """ 242 | 243 | print("-" * 100) 244 | print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") 245 | t = timeit.Timer(stmt=stmt, setup=setup) 246 | print(t.timeit(1)) 247 | 248 | stmt = """ 249 | for i in xrange(10000): 250 | noddy = Noddy() 251 | for j in range(20): 252 | noddy.fields["key"+str(j)] = "value "+str(j) 253 | noddy.save(validate=False, write_concern={"w": 0}) 254 | 255 | myNoddys = Noddy.objects() 256 | [n for n in myNoddys] # iterate 257 | """ 258 | 259 | print("-" * 100) 260 | print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") 261 | t = timeit.Timer(stmt=stmt, setup=setup) 262 | print(t.timeit(1)) 263 | 264 | stmt = """ 265 | for i in xrange(10000): 266 | noddy = Noddy() 267 | for j in range(20): 268 | noddy.fields["key"+str(j)] = "value "+str(j) 269 | noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) 270 | 271 | myNoddys = Noddy.objects() 272 | [n for n in myNoddys] # iterate 273 | """ 274 | 275 | print("-" * 100) 276 | print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") 277 | t = 
timeit.Timer(stmt=stmt, setup=setup) 278 | print(t.timeit(1)) 279 | 280 | 281 | if __name__ == "__main__": 282 | main() 283 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " pickle to make pickle files" 22 | @echo " json to make JSON files" 23 | @echo " htmlhelp to make HTML files and a HTML help project" 24 | @echo " qthelp to make HTML files and a qthelp project" 25 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 26 | @echo " changes to make an overview of all changed/added/deprecated items" 27 | @echo " linkcheck to check all external links for integrity" 28 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 29 | 30 | clean: 31 | -rm -rf $(BUILDDIR)/* 32 | 33 | html: 34 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 35 | @echo 36 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 37 | 38 | dirhtml: 39 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 40 | @echo 41 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 42 | 43 | pickle: 44 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 45 | @echo 46 | @echo "Build finished; now you can process the pickle files." 47 | 48 | json: 49 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 50 | @echo 51 | @echo "Build finished; now you can process the JSON files." 52 | 53 | htmlhelp: 54 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 55 | @echo 56 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 57 | ".hhp project file in $(BUILDDIR)/htmlhelp." 58 | 59 | qthelp: 60 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 61 | @echo 62 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 63 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 64 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MongoEngine.qhcp" 65 | @echo "To view the help file:" 66 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MongoEngine.qhc" 67 | 68 | latex: 69 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 70 | @echo 71 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 72 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 73 | "run these through (pdf)latex." 74 | 75 | changes: 76 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 77 | @echo 78 | @echo "The overview file is in $(BUILDDIR)/changes." 79 | 80 | linkcheck: 81 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 82 | @echo 83 | @echo "Link check complete; look for any errors in the above output " \ 84 | "or in $(BUILDDIR)/linkcheck/output.txt." 
85 | 86 | doctest: 87 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 88 | @echo "Testing of doctests in the sources finished, look at the " \ 89 | "results in $(BUILDDIR)/doctest/output.txt." 90 | -------------------------------------------------------------------------------- /docs/_themes/nature/static/nature.css_t: -------------------------------------------------------------------------------- 1 | /** 2 | * Sphinx stylesheet -- default theme 3 | * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 4 | */ 5 | 6 | @import url("basic.css"); 7 | 8 | /* -- page layout ----------------------------------------------------------- */ 9 | 10 | body { 11 | font-family: Arial, sans-serif; 12 | font-size: 100%; 13 | background-color: #111; 14 | color: #555; 15 | margin: 0; 16 | padding: 0; 17 | } 18 | 19 | div.documentwrapper { 20 | float: left; 21 | width: 100%; 22 | } 23 | 24 | div.bodywrapper { 25 | margin: 0 0 0 230px; 26 | } 27 | 28 | hr{ 29 | border: 1px solid #B1B4B6; 30 | } 31 | 32 | div.document { 33 | background-color: #eee; 34 | } 35 | 36 | div.body { 37 | background-color: #ffffff; 38 | color: #3E4349; 39 | padding: 0 30px 30px 30px; 40 | font-size: 0.8em; 41 | } 42 | 43 | div.footer { 44 | color: #555; 45 | width: 100%; 46 | padding: 13px 0; 47 | text-align: center; 48 | font-size: 75%; 49 | } 50 | 51 | div.footer a { 52 | color: #444; 53 | text-decoration: underline; 54 | } 55 | 56 | div.related { 57 | background-color: #6BA81E; 58 | line-height: 32px; 59 | color: #fff; 60 | text-shadow: 0px 1px 0 #444; 61 | font-size: 0.80em; 62 | } 63 | 64 | div.related a { 65 | color: #E2F3CC; 66 | } 67 | 68 | div.sphinxsidebar { 69 | font-size: 0.75em; 70 | line-height: 1.5em; 71 | } 72 | 73 | div.sphinxsidebarwrapper{ 74 | padding: 20px 0; 75 | } 76 | 77 | div.sphinxsidebar h3, 78 | div.sphinxsidebar h4 { 79 | font-family: Arial, sans-serif; 80 | color: #222; 81 | font-size: 1.2em; 82 | font-weight: normal; 83 | margin: 0; 84 | padding: 5px 10px; 85 | background-color: #ddd; 86 | text-shadow: 1px 1px 0 white 87 | } 88 | 89 | div.sphinxsidebar h4{ 90 | font-size: 1.1em; 91 | } 92 | 93 | div.sphinxsidebar h3 a { 94 | color: #444; 95 | } 96 | 97 | 98 | div.sphinxsidebar p { 99 | color: #888; 100 | padding: 5px 20px; 101 | } 102 | 103 | div.sphinxsidebar p.topless { 104 | } 105 | 106 | div.sphinxsidebar ul { 107 | margin: 10px 20px; 108 | padding: 0; 109 | color: #000; 110 | } 111 | 112 | div.sphinxsidebar a { 113 | color: #444; 114 | } 115 | 116 | div.sphinxsidebar input { 117 | border: 1px solid #ccc; 118 | font-family: sans-serif; 119 | font-size: 1em; 120 | } 121 | 122 | div.sphinxsidebar input[type=text]{ 123 | margin-left: 20px; 124 | } 125 | 126 | /* -- body styles ----------------------------------------------------------- */ 127 | 128 | a { 129 | color: #005B81; 130 | text-decoration: none; 131 | } 132 | 133 | a:hover { 134 | color: #E32E00; 135 | text-decoration: underline; 136 | } 137 | 138 | div.body h1, 139 | div.body h2, 140 | div.body h3, 141 | div.body h4, 142 | div.body h5, 143 | div.body h6 { 144 | font-family: Arial, sans-serif; 145 | background-color: #BED4EB; 146 | font-weight: normal; 147 | color: #212224; 148 | margin: 30px 0px 10px 0px; 149 | padding: 5px 0 5px 10px; 150 | text-shadow: 0px 1px 0 white 151 | } 152 | 153 | div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } 154 | div.body h2 { font-size: 150%; background-color: #C8D5E3; } 155 | div.body h3 { font-size: 120%; background-color: #D8DEE3; } 156 | div.body h4 { font-size: 110%; 
background-color: #D8DEE3; } 157 | div.body h5 { font-size: 100%; background-color: #D8DEE3; } 158 | div.body h6 { font-size: 100%; background-color: #D8DEE3; } 159 | 160 | a.headerlink { 161 | color: #c60f0f; 162 | font-size: 0.8em; 163 | padding: 0 4px 0 4px; 164 | text-decoration: none; 165 | } 166 | 167 | a.headerlink:hover { 168 | background-color: #c60f0f; 169 | color: white; 170 | } 171 | 172 | div.body p, div.body dd, div.body li { 173 | line-height: 1.5em; 174 | } 175 | 176 | div.admonition p.admonition-title + p { 177 | display: inline; 178 | } 179 | 180 | div.highlight{ 181 | background-color: white; 182 | } 183 | 184 | div.note { 185 | background-color: #eee; 186 | border: 1px solid #ccc; 187 | } 188 | 189 | div.seealso { 190 | background-color: #ffc; 191 | border: 1px solid #ff6; 192 | } 193 | 194 | div.topic { 195 | background-color: #eee; 196 | } 197 | 198 | div.warning { 199 | background-color: #ffe4e4; 200 | border: 1px solid #f66; 201 | } 202 | 203 | p.admonition-title { 204 | display: inline; 205 | } 206 | 207 | p.admonition-title:after { 208 | content: ":"; 209 | } 210 | 211 | pre { 212 | padding: 10px; 213 | background-color: White; 214 | color: #222; 215 | line-height: 1.2em; 216 | border: 1px solid #C6C9CB; 217 | font-size: 1.2em; 218 | margin: 1.5em 0 1.5em 0; 219 | -webkit-box-shadow: 1px 1px 1px #d8d8d8; 220 | -moz-box-shadow: 1px 1px 1px #d8d8d8; 221 | } 222 | 223 | tt { 224 | background-color: #ecf0f3; 225 | color: #222; 226 | padding: 1px 2px; 227 | font-size: 1.2em; 228 | font-family: monospace; 229 | } 230 | -------------------------------------------------------------------------------- /docs/_themes/nature/static/pygments.css: -------------------------------------------------------------------------------- 1 | .c { color: #999988; font-style: italic } /* Comment */ 2 | .k { font-weight: bold } /* Keyword */ 3 | .o { font-weight: bold } /* Operator */ 4 | .cm { color: #999988; font-style: italic } /* Comment.Multiline */ 5 | .cp { color: #999999; font-weight: bold } /* Comment.preproc */ 6 | .c1 { color: #999988; font-style: italic } /* Comment.Single */ 7 | .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ 8 | .ge { font-style: italic } /* Generic.Emph */ 9 | .gr { color: #aa0000 } /* Generic.Error */ 10 | .gh { color: #999999 } /* Generic.Heading */ 11 | .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ 12 | .go { color: #111 } /* Generic.Output */ 13 | .gp { color: #555555 } /* Generic.Prompt */ 14 | .gs { font-weight: bold } /* Generic.Strong */ 15 | .gu { color: #aaaaaa } /* Generic.Subheading */ 16 | .gt { color: #aa0000 } /* Generic.Traceback */ 17 | .kc { font-weight: bold } /* Keyword.Constant */ 18 | .kd { font-weight: bold } /* Keyword.Declaration */ 19 | .kp { font-weight: bold } /* Keyword.Pseudo */ 20 | .kr { font-weight: bold } /* Keyword.Reserved */ 21 | .kt { color: #445588; font-weight: bold } /* Keyword.Type */ 22 | .m { color: #009999 } /* Literal.Number */ 23 | .s { color: #bb8844 } /* Literal.String */ 24 | .na { color: #008080 } /* Name.Attribute */ 25 | .nb { color: #999999 } /* Name.Builtin */ 26 | .nc { color: #445588; font-weight: bold } /* Name.Class */ 27 | .no { color: #ff99ff } /* Name.Constant */ 28 | .ni { color: #800080 } /* Name.Entity */ 29 | .ne { color: #990000; font-weight: bold } /* Name.Exception */ 30 | .nf { color: #990000; font-weight: bold } /* Name.Function */ 31 | .nn { color: #555555 } /* Name.Namespace */ 32 | .nt { color: #000080 } /* Name.Tag */ 33 | .nv { color: 
purple } /* Name.Variable */ 34 | .ow { font-weight: bold } /* Operator.Word */ 35 | .mf { color: #009999 } /* Literal.Number.Float */ 36 | .mh { color: #009999 } /* Literal.Number.Hex */ 37 | .mi { color: #009999 } /* Literal.Number.Integer */ 38 | .mo { color: #009999 } /* Literal.Number.Oct */ 39 | .sb { color: #bb8844 } /* Literal.String.Backtick */ 40 | .sc { color: #bb8844 } /* Literal.String.Char */ 41 | .sd { color: #bb8844 } /* Literal.String.Doc */ 42 | .s2 { color: #bb8844 } /* Literal.String.Double */ 43 | .se { color: #bb8844 } /* Literal.String.Escape */ 44 | .sh { color: #bb8844 } /* Literal.String.Heredoc */ 45 | .si { color: #bb8844 } /* Literal.String.Interpol */ 46 | .sx { color: #bb8844 } /* Literal.String.Other */ 47 | .sr { color: #808000 } /* Literal.String.Regex */ 48 | .s1 { color: #bb8844 } /* Literal.String.Single */ 49 | .ss { color: #bb8844 } /* Literal.String.Symbol */ 50 | .bp { color: #999999 } /* Name.Builtin.Pseudo */ 51 | .vc { color: #ff99ff } /* Name.Variable.Class */ 52 | .vg { color: #ff99ff } /* Name.Variable.Global */ 53 | .vi { color: #ff99ff } /* Name.Variable.Instance */ 54 | .il { color: #009999 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/_themes/nature/theme.conf: -------------------------------------------------------------------------------- 1 | [theme] 2 | inherit = basic 3 | stylesheet = nature.css 4 | pygments_style = tango 5 | -------------------------------------------------------------------------------- /docs/apireference.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | API Reference 3 | ============= 4 | 5 | Connecting 6 | ========== 7 | 8 | .. autofunction:: mongoengine.connect 9 | .. autofunction:: mongoengine.register_connection 10 | 11 | Documents 12 | ========= 13 | 14 | .. autoclass:: mongoengine.Document 15 | :members: 16 | 17 | .. attribute:: objects 18 | 19 | A :class:`~mongoengine.queryset.QuerySet` object that is created lazily 20 | on access. 21 | 22 | .. autoclass:: mongoengine.EmbeddedDocument 23 | :members: 24 | 25 | .. autoclass:: mongoengine.DynamicDocument 26 | :members: 27 | 28 | .. autoclass:: mongoengine.DynamicEmbeddedDocument 29 | :members: 30 | 31 | .. autoclass:: mongoengine.document.MapReduceDocument 32 | :members: 33 | 34 | .. autoclass:: mongoengine.ValidationError 35 | :members: 36 | 37 | Context Managers 38 | ================ 39 | 40 | .. autoclass:: mongoengine.context_managers.switch_db 41 | .. autoclass:: mongoengine.context_managers.no_dereference 42 | .. autoclass:: mongoengine.context_managers.query_counter 43 | 44 | Querying 45 | ======== 46 | 47 | .. autoclass:: mongoengine.queryset.QuerySet 48 | :members: 49 | 50 | .. automethod:: mongoengine.queryset.QuerySet.__call__ 51 | 52 | .. autofunction:: mongoengine.queryset.queryset_manager 53 | 54 | Fields 55 | ====== 56 | 57 | .. autoclass:: mongoengine.base.fields.BaseField 58 | .. autoclass:: mongoengine.fields.StringField 59 | .. autoclass:: mongoengine.fields.URLField 60 | .. autoclass:: mongoengine.fields.EmailField 61 | .. autoclass:: mongoengine.fields.IntField 62 | .. autoclass:: mongoengine.fields.LongField 63 | .. autoclass:: mongoengine.fields.FloatField 64 | .. autoclass:: mongoengine.fields.DecimalField 65 | .. autoclass:: mongoengine.fields.BooleanField 66 | .. autoclass:: mongoengine.fields.DateTimeField 67 | .. autoclass:: mongoengine.fields.ComplexDateTimeField 68 | .. 
autoclass:: mongoengine.fields.EmbeddedDocumentField 69 | .. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField 70 | .. autoclass:: mongoengine.fields.DynamicField 71 | .. autoclass:: mongoengine.fields.ListField 72 | .. autoclass:: mongoengine.fields.SortedListField 73 | .. autoclass:: mongoengine.fields.DictField 74 | .. autoclass:: mongoengine.fields.MapField 75 | .. autoclass:: mongoengine.fields.ReferenceField 76 | .. autoclass:: mongoengine.fields.GenericReferenceField 77 | .. autoclass:: mongoengine.fields.BinaryField 78 | .. autoclass:: mongoengine.fields.FileField 79 | .. autoclass:: mongoengine.fields.ImageField 80 | .. autoclass:: mongoengine.fields.SequenceField 81 | .. autoclass:: mongoengine.fields.ObjectIdField 82 | .. autoclass:: mongoengine.fields.UUIDField 83 | .. autoclass:: mongoengine.fields.GeoPointField 84 | .. autoclass:: mongoengine.fields.PointField 85 | .. autoclass:: mongoengine.fields.LineStringField 86 | .. autoclass:: mongoengine.fields.PolygonField 87 | .. autoclass:: mongoengine.fields.GridFSError 88 | .. autoclass:: mongoengine.fields.GridFSProxy 89 | .. autoclass:: mongoengine.fields.ImageGridFsProxy 90 | .. autoclass:: mongoengine.fields.ImproperlyConfigured 91 | 92 | Misc 93 | ==== 94 | 95 | .. autofunction:: mongoengine.common._import_class 96 | -------------------------------------------------------------------------------- /docs/code/tumblelog.py: -------------------------------------------------------------------------------- 1 | from mongoengine import * 2 | 3 | connect('tumblelog') 4 | 5 | class Comment(EmbeddedDocument): 6 | content = StringField() 7 | name = StringField(max_length=120) 8 | 9 | class User(Document): 10 | email = StringField(required=True) 11 | first_name = StringField(max_length=50) 12 | last_name = StringField(max_length=50) 13 | 14 | class Post(Document): 15 | title = StringField(max_length=120, required=True) 16 | author = ReferenceField(User) 17 | tags = ListField(StringField(max_length=30)) 18 | comments = ListField(EmbeddedDocumentField(Comment)) 19 | 20 | class TextPost(Post): 21 | content = StringField() 22 | 23 | class ImagePost(Post): 24 | image_path = StringField() 25 | 26 | class LinkPost(Post): 27 | link_url = StringField() 28 | 29 | Post.drop_collection() 30 | 31 | john = User(email='jdoe@example.com', first_name='John', last_name='Doe') 32 | john.save() 33 | 34 | post1 = TextPost(title='Fun with MongoEngine', author=john) 35 | post1.content = 'Took a look at MongoEngine today, looks pretty cool.' 
36 | post1.tags = ['mongodb', 'mongoengine'] 37 | post1.save() 38 | 39 | post2 = LinkPost(title='MongoEngine Documentation', author=john) 40 | post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' 41 | post2.tags = ['mongoengine'] 42 | post2.save() 43 | 44 | print('ALL POSTS') 45 | print() 46 | for post in Post.objects: 47 | print(post.title) 48 | print('=' * post.title.count()) 49 | 50 | if isinstance(post, TextPost): 51 | print(post.content) 52 | 53 | if isinstance(post, LinkPost): 54 | print('Link:', post.link_url) 55 | 56 | print() 57 | print() 58 | 59 | print('POSTS TAGGED \'MONGODB\'') 60 | print() 61 | for post in Post.objects(tags='mongodb'): 62 | print(post.title) 63 | print() 64 | 65 | num_posts = Post.objects(tags='mongodb').count() 66 | print('Found %d posts with tag "mongodb"' % num_posts) 67 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # MongoEngine documentation build configuration file, created by 4 | # sphinx-quickstart on Sun Nov 22 18:14:13 2009. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys, os 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | sys.path.insert(0, os.path.abspath('..')) 20 | 21 | # -- General configuration ----------------------------------------------------- 22 | 23 | # Add any Sphinx extension module names here, as strings. They can be extensions 24 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 25 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] 26 | 27 | # Add any paths that contain templates here, relative to this directory. 28 | templates_path = ['_templates'] 29 | 30 | # The suffix of source filenames. 31 | source_suffix = '.rst' 32 | 33 | # The encoding of source files. 34 | #source_encoding = 'utf-8' 35 | 36 | # The master toctree document. 37 | master_doc = 'index' 38 | 39 | # General information about the project. 40 | project = 'MongoEngine' 41 | copyright = '2009, MongoEngine Authors' 42 | 43 | # The version info for the project you're documenting, acts as replacement for 44 | # |version| and |release|, also used in various other places throughout the 45 | # built documents. 46 | # 47 | import mongoengine 48 | # The short X.Y version. 49 | version = mongoengine.get_version() 50 | # The full version, including alpha/beta/rc tags. 51 | release = mongoengine.get_version() 52 | 53 | # The language for content autogenerated by Sphinx. Refer to documentation 54 | # for a list of supported languages. 55 | #language = None 56 | 57 | # There are two options for replacing |today|: either, you set today to some 58 | # non-false value, then it is used: 59 | #today = '' 60 | # Else, today_fmt is used as the format for a strftime call. 61 | #today_fmt = '%B %d, %Y' 62 | 63 | # List of documents that shouldn't be included in the build. 
64 | #unused_docs = [] 65 | 66 | # List of directories, relative to source directory, that shouldn't be searched 67 | # for source files. 68 | exclude_trees = ['_build'] 69 | 70 | # The reST default role (used for this markup: `text`) to use for all documents. 71 | #default_role = None 72 | 73 | # If true, '()' will be appended to :func: etc. cross-reference text. 74 | #add_function_parentheses = True 75 | 76 | # If true, the current module name will be prepended to all description 77 | # unit titles (such as .. function::). 78 | #add_module_names = True 79 | 80 | # If true, sectionauthor and moduleauthor directives will be shown in the 81 | # output. They are ignored by default. 82 | #show_authors = False 83 | 84 | # The name of the Pygments (syntax highlighting) style to use. 85 | pygments_style = 'sphinx' 86 | 87 | # A list of ignored prefixes for module index sorting. 88 | #modindex_common_prefix = [] 89 | 90 | 91 | # -- Options for HTML output --------------------------------------------------- 92 | 93 | # The theme to use for HTML and HTML Help pages. Major themes that come with 94 | # Sphinx are currently 'default' and 'sphinxdoc'. 95 | html_theme = 'nature' 96 | 97 | # Theme options are theme-specific and customize the look and feel of a theme 98 | # further. For a list of options available for each theme, see the 99 | # documentation. 100 | #html_theme_options = {} 101 | 102 | # Add any paths that contain custom themes here, relative to this directory. 103 | html_theme_path = ['_themes'] 104 | 105 | # The name for this set of Sphinx documents. If None, it defaults to 106 | # " v documentation". 107 | #html_title = None 108 | 109 | # A shorter title for the navigation bar. Default is the same as html_title. 110 | #html_short_title = None 111 | 112 | # The name of an image file (relative to this directory) to place at the top 113 | # of the sidebar. 114 | #html_logo = None 115 | 116 | # The name of an image file (within the static path) to use as favicon of the 117 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 118 | # pixels large. 119 | #html_favicon = None 120 | 121 | # Add any paths that contain custom static files (such as style sheets) here, 122 | # relative to this directory. They are copied after the builtin static files, 123 | # so a file named "default.css" will overwrite the builtin "default.css". 124 | #html_static_path = ['_static'] 125 | 126 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 127 | # using the given strftime format. 128 | #html_last_updated_fmt = '%b %d, %Y' 129 | 130 | # If true, SmartyPants will be used to convert quotes and dashes to 131 | # typographically correct entities. 132 | html_use_smartypants = True 133 | 134 | # Custom sidebar templates, maps document names to template names. 135 | html_sidebars = { 136 | 'index': ['globaltoc.html', 'searchbox.html'], 137 | '**': ['localtoc.html', 'relations.html', 'searchbox.html'] 138 | } 139 | 140 | 141 | # Additional templates that should be rendered to pages, maps page names to 142 | # template names. 143 | #html_additional_pages = {} 144 | 145 | # If false, no module index is generated. 146 | #html_use_modindex = True 147 | 148 | # If false, no index is generated. 149 | #html_use_index = True 150 | 151 | # If true, the index is split into individual pages for each letter. 152 | #html_split_index = False 153 | 154 | # If true, links to the reST sources are added to the pages. 
155 | #html_show_sourcelink = True 156 | 157 | # If true, an OpenSearch description file will be output, and all pages will 158 | # contain a tag referring to it. The value of this option must be the 159 | # base URL from which the finished HTML is served. 160 | #html_use_opensearch = '' 161 | 162 | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). 163 | #html_file_suffix = '' 164 | 165 | # Output file base name for HTML help builder. 166 | htmlhelp_basename = 'MongoEnginedoc' 167 | 168 | 169 | # -- Options for LaTeX output -------------------------------------------------- 170 | 171 | # The paper size ('letter' or 'a4'). 172 | latex_paper_size = 'a4' 173 | 174 | # The font size ('10pt', '11pt' or '12pt'). 175 | #latex_font_size = '10pt' 176 | 177 | # Grouping the document tree into LaTeX files. List of tuples 178 | # (source start file, target name, title, author, documentclass [howto/manual]). 179 | latex_documents = [ 180 | ('index', 'MongoEngine.tex', 'MongoEngine Documentation', 181 | 'Ross Lawley', 'manual'), 182 | ] 183 | 184 | # The name of an image file (relative to this directory) to place at the top of 185 | # the title page. 186 | #latex_logo = None 187 | 188 | # For "manual" documents, if this is true, then toplevel headings are parts, 189 | # not chapters. 190 | #latex_use_parts = False 191 | 192 | # Additional stuff for the LaTeX preamble. 193 | #latex_preamble = '' 194 | 195 | # Documents to append as an appendix to all manuals. 196 | #latex_appendices = [] 197 | 198 | # If false, no module index is generated. 199 | #latex_use_modindex = True 200 | 201 | autoclass_content = 'both' 202 | 203 | -------------------------------------------------------------------------------- /docs/guide/connecting.rst: -------------------------------------------------------------------------------- 1 | .. _guide-connecting: 2 | 3 | ===================== 4 | Connecting to MongoDB 5 | ===================== 6 | 7 | To connect to a running instance of :program:`mongod`, use the 8 | :func:`~mongoengine.connect` function. The first argument is the name of the 9 | database to connect to:: 10 | 11 | from mongoengine import connect 12 | connect('project1') 13 | 14 | By default, MongoEngine assumes that the :program:`mongod` instance is running 15 | on **localhost** on port **27017**. If MongoDB is running elsewhere, you should 16 | provide the :attr:`host` and :attr:`port` arguments to 17 | :func:`~mongoengine.connect`:: 18 | 19 | connect('project1', host='192.168.1.35', port=12345) 20 | 21 | If the database requires authentication, :attr:`username` and :attr:`password` 22 | arguments should be provided:: 23 | 24 | connect('project1', username='webapp', password='pwd123') 25 | 26 | Uri style connections are also supported as long as you include the database 27 | name - just supply the uri as the :attr:`host` to 28 | :func:`~mongoengine.connect`:: 29 | 30 | connect('project1', host='mongodb://localhost/database_name') 31 | 32 | ReplicaSets 33 | =========== 34 | 35 | MongoEngine supports replica sets through :class:`~pymongo.mongo_client.MongoClient`. 36 | To use them please use a URI style connection and provide the `replicaSet` name in the 37 | connection kwargs. 
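For example (a minimal sketch -- the hosts and the set name ``rs0`` are placeholders for your own deployment, not defaults)::

    from mongoengine import connect

    # Hypothetical three-member replica set; adjust the hosts and set name.
    connect('project1',
            host='mongodb://db1.example.com:27017,db2.example.com:27017,db3.example.com:27017/project1',
            replicaSet='rs0')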
38 | 39 | Read preferences are supported through the connection or via individual 40 | queries by passing the read_preference :: 41 | 42 | Bar.objects().read_preference(ReadPreference.PRIMARY) 43 | Bar.objects(read_preference=ReadPreference.PRIMARY) 44 | 45 | Multiple Databases 46 | ================== 47 | 48 | Multiple database support was added in MongoEngine 0.6. To use multiple 49 | databases you can use :func:`~mongoengine.connect` and provide an `alias` name 50 | for the connection - if no `alias` is provided then "default" is used. 51 | 52 | In the background this uses :func:`~mongoengine.register_connection` to 53 | store the data and you can register all aliases up front if required. 54 | 55 | Individual documents can also support multiple databases by providing a 56 | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects 57 | to point across databases and collections. Below is an example schema, using 58 | 3 different databases to store data:: 59 | 60 | class User(Document): 61 | name = StringField() 62 | 63 | meta = {"db_alias": "user-db"} 64 | 65 | class Book(Document): 66 | name = StringField() 67 | 68 | meta = {"db_alias": "book-db"} 69 | 70 | class AuthorBooks(Document): 71 | author = ReferenceField(User) 72 | book = ReferenceField(Book) 73 | 74 | meta = {"db_alias": "users-books-db"} 75 | 76 | 77 | Switch Database Context Manager 78 | =============================== 79 | 80 | Sometimes you may want to switch the database to query against for a class 81 | for example, archiving older data into a separate database for performance 82 | reasons. 83 | 84 | The :class:`~mongoengine.context_managers.switch_db` context manager allows 85 | you to change the database alias for a given class allowing quick and easy 86 | access to the same User document across databases:: 87 | 88 | from mongoengine.context_managers import switch_db 89 | 90 | class User(Document): 91 | name = StringField() 92 | 93 | meta = {"db_alias": "user-db"} 94 | 95 | with switch_db(User, 'archive-user-db') as User: 96 | User(name="Ross").save() # Saves the 'archive-user-db' 97 | 98 | .. note:: Make sure any aliases have been registered with 99 | :func:`~mongoengine.register_connection` before using the context manager. 100 | -------------------------------------------------------------------------------- /docs/guide/document-instances.rst: -------------------------------------------------------------------------------- 1 | =================== 2 | Documents instances 3 | =================== 4 | To create a new document object, create an instance of the relevant document 5 | class, providing values for its fields as its constructor keyword arguments. 6 | You may provide values for any of the fields on the document:: 7 | 8 | >>> page = Page(title="Test Page") 9 | >>> page.title 10 | 'Test Page' 11 | 12 | You may also assign values to the document's fields using standard object 13 | attribute syntax:: 14 | 15 | >>> page.title = "Example Page" 16 | >>> page.title 17 | 'Example Page' 18 | 19 | Saving and deleting documents 20 | ============================= 21 | MongoEngine tracks changes to documents to provide efficient saving. To save 22 | the document to the database, call the :meth:`~mongoengine.Document.save` method. 23 | If the document does not exist in the database, it will be created. If it does 24 | already exist, then any changes will be updated atomically. 
For example:: 25 | 26 | >>> page = Page(title="Test Page") 27 | >>> page.save() # Performs an insert 28 | >>> page.title = "My Page" 29 | >>> page.save() # Performs an atomic set on the title field. 30 | 31 | .. note:: 32 | 33 | Changes to documents are tracked and on the whole perform ``set`` operations. 34 | 35 | * ``list_field.push(0)`` - *sets* the resulting list 36 | * ``del(list_field)`` - *unsets* whole list 37 | 38 | With lists it's preferable to use ``Doc.update(push__list_field=0)`` as 39 | this avoids rewriting the whole list, preventing race conditions. 40 | 41 | .. seealso:: 42 | :ref:`guide-atomic-updates` 43 | 44 | Pre-save data validation and cleaning 45 | ------------------------------------- 46 | MongoEngine allows you to create custom cleaning rules for your documents when 47 | calling :meth:`~mongoengine.Document.save`. By providing a custom 48 | :meth:`~mongoengine.Document.clean` method you can do any pre-validation / data 49 | cleaning. 50 | 51 | This might be useful, for example, if you want to ensure a default value based on other 52 | document values:: 53 | 54 | class Essay(Document): 55 | status = StringField(choices=('Published', 'Draft'), required=True) 56 | pub_date = DateTimeField() 57 | 58 | def clean(self): 59 | """Ensures that only published essays have a `pub_date` and 60 | automatically sets the pub_date if published and not set""" 61 | if self.status == 'Draft' and self.pub_date is not None: 62 | msg = 'Draft entries should not have a publication date.' 63 | raise ValidationError(msg) 64 | # Set the pub_date for published items if not set. 65 | if self.status == 'Published' and self.pub_date is None: 66 | self.pub_date = datetime.now() 67 | 68 | .. note:: 69 | Cleaning is only called if validation is turned on and when calling 70 | :meth:`~mongoengine.Document.save`. 71 | 72 | Cascading Saves 73 | --------------- 74 | If your document contains :class:`~mongoengine.fields.ReferenceField` or 75 | :class:`~mongoengine.fields.GenericReferenceField` objects, then by default the 76 | :meth:`~mongoengine.Document.save` method will not save any changes to 77 | those objects. If you want all references to be saved as well (note that each 78 | save is a separate query), pass :attr:`cascade` as True 79 | to the save method to cascade any saves. 80 | 81 | Deleting documents 82 | ------------------ 83 | To delete a document, call the :meth:`~mongoengine.Document.delete` method. 84 | Note that this will only work if the document exists in the database and has a 85 | valid :attr:`id`. 86 | 87 | Document IDs 88 | ============ 89 | Each document in the database has a unique id. This may be accessed through the 90 | :attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id 91 | will be generated automatically by the database server when the object is saved, 92 | meaning that you may only access the :attr:`id` field once a document has been 93 | saved:: 94 | 95 | >>> page = Page(title="Test Page") 96 | >>> page.id 97 | >>> page.save() 98 | >>> page.id 99 | ObjectId('123456789abcdef000000000') 100 | 101 | Alternatively, you may define one of your own fields to be the document's 102 | "primary key" by providing ``primary_key=True`` as a keyword argument to a 103 | field's constructor.
Under the hood, MongoEngine will use this field as the 104 | :attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so 105 | you may still use :attr:`id` to access the primary key if you want:: 106 | 107 | >>> class User(Document): 108 | ... email = StringField(primary_key=True) 109 | ... name = StringField() 110 | ... 111 | >>> bob = User(email='bob@example.com', name='Bob') 112 | >>> bob.save() 113 | >>> bob.id == bob.email == 'bob@example.com' 114 | True 115 | 116 | You can also access the document's "primary key" using the :attr:`pk` field; in 117 | is an alias to :attr:`id`:: 118 | 119 | >>> page = Page(title="Another Test Page") 120 | >>> page.save() 121 | >>> page.id == page.pk 122 | 123 | .. note:: 124 | 125 | If you define your own primary key field, the field implicitly becomes 126 | required, so a :class:`~mongoengine.ValidationError` will be thrown if 127 | you don't provide it. 128 | -------------------------------------------------------------------------------- /docs/guide/gridfs.rst: -------------------------------------------------------------------------------- 1 | ====== 2 | GridFS 3 | ====== 4 | 5 | .. versionadded:: 0.4 6 | 7 | Writing 8 | ------- 9 | 10 | GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field 11 | object. This field acts as a file-like object and provides a couple of 12 | different ways of inserting and retrieving data. Arbitrary metadata such as 13 | content type can also be stored alongside the files. In the following example, 14 | a document is created to store details about animals, including a photo:: 15 | 16 | class Animal(Document): 17 | genus = StringField() 18 | family = StringField() 19 | photo = FileField() 20 | 21 | marmot = Animal(genus='Marmota', family='Sciuridae') 22 | 23 | marmot_photo = open('marmot.jpg', 'r') 24 | marmot.photo.put(marmot_photo, content_type = 'image/jpeg') 25 | marmot.save() 26 | 27 | Retrieval 28 | --------- 29 | 30 | So using the :class:`~mongoengine.fields.FileField` is just like using any other 31 | field. The file can also be retrieved just as easily:: 32 | 33 | marmot = Animal.objects(genus='Marmota').first() 34 | photo = marmot.photo.read() 35 | content_type = marmot.photo.content_type 36 | 37 | Streaming 38 | --------- 39 | 40 | Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a 41 | slightly different manner. First, a new file must be created by calling the 42 | :func:`new_file` method. Data can then be written using :func:`write`:: 43 | 44 | marmot.photo.new_file() 45 | marmot.photo.write('some_image_data') 46 | marmot.photo.write('some_more_image_data') 47 | marmot.photo.close() 48 | 49 | marmot.photo.save() 50 | 51 | Deletion 52 | -------- 53 | 54 | Deleting stored files is achieved with the :func:`delete` method:: 55 | 56 | marmot.photo.delete() 57 | 58 | .. warning:: 59 | 60 | The FileField in a Document actually only stores the ID of a file in a 61 | separate GridFS collection. This means that deleting a document 62 | with a defined FileField does not actually delete the file. You must be 63 | careful to delete any files in a Document as above before deleting the 64 | Document itself. 65 | 66 | 67 | Replacing files 68 | --------------- 69 | 70 | Files can be replaced with the :func:`replace` method. 
This works just like 71 | the :func:`put` method so even metadata can (and should) be replaced:: 72 | 73 | another_marmot = open('another_marmot.png', 'r') 74 | marmot.photo.replace(another_marmot, content_type='image/png') 75 | -------------------------------------------------------------------------------- /docs/guide/index.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | User Guide 3 | ========== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | 8 | installing 9 | connecting 10 | defining-documents 11 | document-instances 12 | querying 13 | gridfs 14 | signals 15 | -------------------------------------------------------------------------------- /docs/guide/installing.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | Installing MongoEngine 3 | ====================== 4 | 5 | To use MongoEngine, you will need to download `MongoDB `_ 6 | and ensure it is running in an accessible location. You will also need 7 | `PyMongo `_ to use MongoEngine, but if you 8 | install MongoEngine using setuptools, then the dependencies will be handled for 9 | you. 10 | 11 | MongoEngine is available on PyPI, so to use it you can use :program:`pip`: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install mongoengine 16 | 17 | Alternatively, if you don't have setuptools installed, `download it from PyPi 18 | `_ and run 19 | 20 | .. code-block:: console 21 | 22 | $ python setup.py install 23 | 24 | To use the bleeding-edge version of MongoEngine, you can get the source from 25 | `GitHub `_ and install it as above: 26 | 27 | .. code-block:: console 28 | 29 | $ git clone git://github.com/mongoengine/mongoengine 30 | $ cd mongoengine 31 | $ python setup.py install 32 | -------------------------------------------------------------------------------- /docs/guide/signals.rst: -------------------------------------------------------------------------------- 1 | .. _signals: 2 | 3 | ======= 4 | Signals 5 | ======= 6 | 7 | .. versionadded:: 0.5 8 | 9 | .. note:: 10 | 11 | Signal support is provided by the excellent `blinker`_ library. If you wish 12 | to enable signal support this library must be installed, though it is not 13 | required for MongoEngine to function. 14 | 15 | Overview 16 | -------- 17 | 18 | Signals are found within the `mongoengine.signals` module. Unless 19 | specified signals receive no additional arguments beyond the `sender` class and 20 | `document` instance. Post-signals are only called if there were no exceptions 21 | raised during the processing of their related function. 22 | 23 | Available signals include: 24 | 25 | `pre_init` 26 | Called during the creation of a new :class:`~mongoengine.Document` or 27 | :class:`~mongoengine.EmbeddedDocument` instance, after the constructor 28 | arguments have been collected but before any additional processing has been 29 | done to them. (I.e. assignment of default values.) Handlers for this signal 30 | are passed the dictionary of arguments using the `values` keyword argument 31 | and may modify this dictionary prior to returning. 32 | 33 | `post_init` 34 | Called after all processing of a new :class:`~mongoengine.Document` or 35 | :class:`~mongoengine.EmbeddedDocument` instance has been completed. 36 | 37 | `pre_save` 38 | Called within :meth:`~mongoengine.document.Document.save` prior to performing 39 | any actions. 
40 | 41 | `pre_save_post_validation` 42 | Called within :meth:`~mongoengine.document.Document.save` after validation 43 | has taken place but before saving. 44 | 45 | `post_save` 46 | Called within :meth:`~mongoengine.document.Document.save` after all actions 47 | (validation, insert/update, cascades, clearing dirty flags) have completed 48 | successfully. Passed the additional boolean keyword argument `created` to 49 | indicate if the save was an insert or an update. 50 | 51 | `pre_delete` 52 | Called within :meth:`~mongoengine.document.Document.delete` prior to 53 | attempting the delete operation. 54 | 55 | `post_delete` 56 | Called within :meth:`~mongoengine.document.Document.delete` upon successful 57 | deletion of the record. 58 | 59 | `pre_bulk_insert` 60 | Called after validation of the documents to insert, but prior to any data 61 | being written. In this case, the `document` argument is replaced by a 62 | `documents` argument representing the list of documents being inserted. 63 | 64 | `post_bulk_insert` 65 | Called after a successful bulk insert operation. As per `pre_bulk_insert`, 66 | the `document` argument is omitted and replaced with a `documents` argument. 67 | An additional boolean argument, `loaded`, identifies the contents of 68 | `documents` as either :class:`~mongoengine.Document` instances when `True` or 69 | simply a list of primary key values for the inserted records if `False`. 70 | 71 | Attaching Events 72 | ---------------- 73 | 74 | After writing a handler function like the following:: 75 | 76 | import logging 77 | from datetime import datetime 78 | 79 | from mongoengine import * 80 | from mongoengine import signals 81 | 82 | def update_modified(sender, document): 83 | document.modified = datetime.utcnow() 84 | 85 | You attach the event handler to your :class:`~mongoengine.Document` or 86 | :class:`~mongoengine.EmbeddedDocument` subclass:: 87 | 88 | class Record(Document): 89 | modified = DateTimeField() 90 | 91 | signals.pre_save.connect(update_modified) 92 | 93 | While this is not the most elaborate document model, it does demonstrate the 94 | concepts involved. 
As a more complete demonstration you can also define your 95 | handlers within your subclass:: 96 | 97 | class Author(Document): 98 | name = StringField() 99 | 100 | @classmethod 101 | def pre_save(cls, sender, document, **kwargs): 102 | logging.debug("Pre Save: %s" % document.name) 103 | 104 | @classmethod 105 | def post_save(cls, sender, document, **kwargs): 106 | logging.debug("Post Save: %s" % document.name) 107 | if 'created' in kwargs: 108 | if kwargs['created']: 109 | logging.debug("Created") 110 | else: 111 | logging.debug("Updated") 112 | 113 | signals.pre_save.connect(Author.pre_save, sender=Author) 114 | signals.post_save.connect(Author.post_save, sender=Author) 115 | 116 | Finally, you can also use this small decorator to quickly create a number of 117 | signals and attach them to your :class:`~mongoengine.Document` or 118 | :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: 119 | 120 | def handler(event): 121 | """Signal decorator to allow use of callback functions as class decorators.""" 122 | 123 | def decorator(fn): 124 | def apply(cls): 125 | event.connect(fn, sender=cls) 126 | return cls 127 | 128 | fn.apply = apply 129 | return fn 130 | 131 | return decorator 132 | 133 | Using the first example of updating a modification time the code is now much 134 | cleaner looking while still allowing manual execution of the callback:: 135 | 136 | @handler(signals.pre_save) 137 | def update_modified(sender, document): 138 | document.modified = datetime.utcnow() 139 | 140 | @update_modified.apply 141 | class Record(Document): 142 | modified = DateTimeField() 143 | 144 | 145 | ReferenceFields and Signals 146 | --------------------------- 147 | 148 | Currently `reverse_delete_rules` do not trigger signals on the other part of 149 | the relationship. If this is required you must manually handle the 150 | reverse deletion. 151 | 152 | .. _blinker: http://pypi.python.org/pypi/blinker 153 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ============================== 2 | MongoEngine User Documentation 3 | ============================== 4 | 5 | **MongoEngine** is an Object-Document Mapper, written in Python for working with 6 | MongoDB. To install it, simply run 7 | 8 | .. code-block:: console 9 | 10 | $ pip install -U mongoengine 11 | 12 | :doc:`tutorial` 13 | A quick tutorial building a tumblelog to get you up and running with 14 | MongoEngine. 15 | 16 | :doc:`guide/index` 17 | The Full guide to MongoEngine - from modeling documents to storing files, 18 | from querying for data to firing signals and *everything* between. 19 | 20 | :doc:`apireference` 21 | The complete API documentation --- the innards of documents, querysets and fields. 22 | 23 | :doc:`upgrade` 24 | How to upgrade MongoEngine. 25 | 26 | :doc:`django` 27 | Using MongoEngine and Django 28 | 29 | Community 30 | --------- 31 | 32 | To get help with using MongoEngine, use the `MongoEngine Users mailing list 33 | `_ or the ever popular 34 | `stackoverflow `_. 35 | 36 | Contributing 37 | ------------ 38 | 39 | **Yes please!** We are always looking for contributions, additions and improvements. 40 | 41 | The source is available on `GitHub `_ 42 | and contributions are always encouraged. Contributions can be as simple as 43 | minor tweaks to this documentation, the website or the core. 44 | 45 | To contribute, fork the project on 46 | `GitHub `_ and send a 47 | pull request. 
48 | 49 | Changes 50 | ------- 51 | 52 | See the :doc:`changelog` for a full list of changes to MongoEngine and 53 | :doc:`upgrade` for upgrade information. 54 | 55 | .. note:: Always read and test the `upgrade `_ documentation before 56 | putting updates live in production **;)** 57 | 58 | Offline Reading 59 | --------------- 60 | 61 | Download the docs in `pdf `_ 62 | or `epub `_ 63 | formats for offline reading. 64 | 65 | 66 | .. toctree:: 67 | :maxdepth: 1 68 | :numbered: 69 | :hidden: 70 | 71 | tutorial 72 | guide/index 73 | apireference 74 | changelog 75 | upgrade 76 | django 77 | 78 | Indices and tables 79 | ------------------ 80 | 81 | * :ref:`genindex` 82 | * :ref:`modindex` 83 | * :ref:`search` 84 | 85 | -------------------------------------------------------------------------------- /mongoengine/__init__.py: -------------------------------------------------------------------------------- 1 | from . import document 2 | from .document import * 3 | from . import fields 4 | from .fields import * 5 | from . import connection 6 | from .connection import * 7 | from . import queryset 8 | from .queryset import * 9 | from . import signals 10 | from .signals import * 11 | from .errors import * 12 | from . import errors 13 | 14 | __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + 15 | list(queryset.__all__) + signals.__all__ + list(errors.__all__)) 16 | 17 | VERSION = (0, 8, 2) 18 | MALLARD = True 19 | 20 | 21 | def get_version(): 22 | if isinstance(VERSION[-1], str): 23 | return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] 24 | return '.'.join(map(str, VERSION)) 25 | 26 | __version__ = get_version() 27 | -------------------------------------------------------------------------------- /mongoengine/base/__init__.py: -------------------------------------------------------------------------------- 1 | from mongoengine.base.common import * 2 | from mongoengine.base.datastructures import * 3 | from mongoengine.base.document import * 4 | from mongoengine.base.fields import * 5 | from mongoengine.base.metaclasses import * 6 | 7 | # Help with backwards compatibility 8 | from mongoengine.errors import * 9 | -------------------------------------------------------------------------------- /mongoengine/base/common.py: -------------------------------------------------------------------------------- 1 | from mongoengine.errors import NotRegistered 2 | 3 | __all__ = ('ALLOW_INHERITANCE', 'AUTO_CREATE_INDEX', 'get_document', '_document_registry') 4 | 5 | # don't allow inheritance by default 6 | ALLOW_INHERITANCE = False 7 | 8 | # don't automatically create indexes 9 | AUTO_CREATE_INDEX = False 10 | 11 | _document_registry = {} 12 | 13 | 14 | def get_document(name): 15 | doc = _document_registry.get(name, None) 16 | if not doc: 17 | # Possible old style name 18 | single_end = name.split('.')[-1] 19 | compound_end = '.%s' % single_end 20 | possible_match = [k for k in list(_document_registry.keys()) 21 | if k.endswith(compound_end) or k == single_end] 22 | if len(possible_match) == 1: 23 | doc = _document_registry.get(possible_match.pop(), None) 24 | if not doc: 25 | raise NotRegistered(""" 26 | `%s` has not been registered in the document registry. 27 | Importing the document class automatically registers it, has it 28 | been imported? 
29 | """.strip() % name) 30 | return doc 31 | -------------------------------------------------------------------------------- /mongoengine/base/datastructures.py: -------------------------------------------------------------------------------- 1 | import weakref 2 | from mongoengine.common import _import_class 3 | 4 | __all__ = ("BaseDict", "BaseList") 5 | 6 | 7 | class WeakInstanceMixin(object): 8 | _instance_ref = None 9 | 10 | def _get_instance(self): 11 | return self._instance_ref and self._instance_ref() 12 | 13 | def _set_instance(self, instance): 14 | if instance is None: 15 | self._instance_ref = None 16 | else: 17 | self._instance_ref = weakref.ref(instance) 18 | 19 | _instance = property(_get_instance, _set_instance) 20 | 21 | 22 | class BaseDict(WeakInstanceMixin, dict): 23 | """A special dict so we can watch any changes 24 | """ 25 | 26 | _dereferenced = False 27 | _name = None 28 | 29 | def __init__(self, dict_items, instance, name): 30 | Document = _import_class('Document') 31 | EmbeddedDocument = _import_class('EmbeddedDocument') 32 | 33 | if isinstance(instance, (Document, EmbeddedDocument)): 34 | self._instance = instance 35 | self._name = name 36 | return super(BaseDict, self).__init__(dict_items) 37 | 38 | def __getitem__(self, *args, **kwargs): 39 | value = super(BaseDict, self).__getitem__(*args, **kwargs) 40 | 41 | EmbeddedDocument = _import_class('EmbeddedDocument') 42 | if isinstance(value, EmbeddedDocument) and value._instance is None: 43 | value._instance = self._instance 44 | return value 45 | 46 | def __setitem__(self, *args, **kwargs): 47 | self._mark_as_changed() 48 | return super(BaseDict, self).__setitem__(*args, **kwargs) 49 | 50 | def __delete__(self, *args, **kwargs): 51 | self._mark_as_changed() 52 | return super(BaseDict, self).__delete__(*args, **kwargs) 53 | 54 | def __delitem__(self, *args, **kwargs): 55 | self._mark_as_changed() 56 | return super(BaseDict, self).__delitem__(*args, **kwargs) 57 | 58 | def __delattr__(self, *args, **kwargs): 59 | self._mark_as_changed() 60 | return super(BaseDict, self).__delattr__(*args, **kwargs) 61 | 62 | def __getstate__(self): 63 | self.instance = None 64 | self._dereferenced = False 65 | return self 66 | 67 | def __setstate__(self, state): 68 | self = state 69 | return self 70 | 71 | def clear(self, *args, **kwargs): 72 | self._mark_as_changed() 73 | return super(BaseDict, self).clear(*args, **kwargs) 74 | 75 | def pop(self, *args, **kwargs): 76 | self._mark_as_changed() 77 | return super(BaseDict, self).pop(*args, **kwargs) 78 | 79 | def popitem(self, *args, **kwargs): 80 | self._mark_as_changed() 81 | return super(BaseDict, self).popitem(*args, **kwargs) 82 | 83 | def update(self, *args, **kwargs): 84 | self._mark_as_changed() 85 | return super(BaseDict, self).update(*args, **kwargs) 86 | 87 | def _mark_as_changed(self): 88 | if hasattr(self._instance, '_mark_as_changed'): 89 | self._instance._mark_as_changed(self._name) 90 | 91 | 92 | class BaseList(WeakInstanceMixin, list): 93 | """A special list so we can watch any changes 94 | """ 95 | 96 | _dereferenced = False 97 | _name = None 98 | 99 | def __init__(self, list_items, instance, name): 100 | Document = _import_class('Document') 101 | EmbeddedDocument = _import_class('EmbeddedDocument') 102 | 103 | if isinstance(instance, (Document, EmbeddedDocument)): 104 | self._instance = instance 105 | self._name = name 106 | return super(BaseList, self).__init__(list_items) 107 | 108 | def __getitem__(self, *args, **kwargs): 109 | value = super(BaseList, 
self).__getitem__(*args, **kwargs) 110 | 111 | EmbeddedDocument = _import_class('EmbeddedDocument') 112 | if isinstance(value, EmbeddedDocument) and value._instance is None: 113 | value._instance = self._instance 114 | return value 115 | 116 | def __setitem__(self, *args, **kwargs): 117 | self._mark_as_changed() 118 | return super(BaseList, self).__setitem__(*args, **kwargs) 119 | 120 | def __delitem__(self, *args, **kwargs): 121 | self._mark_as_changed() 122 | return super(BaseList, self).__delitem__(*args, **kwargs) 123 | 124 | def __getstate__(self): 125 | self.instance = None 126 | self._dereferenced = False 127 | return self 128 | 129 | def __setstate__(self, state): 130 | self = state 131 | return self 132 | 133 | def append(self, *args, **kwargs): 134 | self._mark_as_changed() 135 | return super(BaseList, self).append(*args, **kwargs) 136 | 137 | def extend(self, *args, **kwargs): 138 | self._mark_as_changed() 139 | return super(BaseList, self).extend(*args, **kwargs) 140 | 141 | def insert(self, *args, **kwargs): 142 | self._mark_as_changed() 143 | return super(BaseList, self).insert(*args, **kwargs) 144 | 145 | def pop(self, *args, **kwargs): 146 | self._mark_as_changed() 147 | return super(BaseList, self).pop(*args, **kwargs) 148 | 149 | def remove(self, *args, **kwargs): 150 | self._mark_as_changed() 151 | return super(BaseList, self).remove(*args, **kwargs) 152 | 153 | def reverse(self, *args, **kwargs): 154 | self._mark_as_changed() 155 | return super(BaseList, self).reverse(*args, **kwargs) 156 | 157 | def sort(self, *args, **kwargs): 158 | self._mark_as_changed() 159 | return super(BaseList, self).sort(*args, **kwargs) 160 | 161 | def _mark_as_changed(self): 162 | if hasattr(self._instance, '_mark_as_changed'): 163 | self._instance._mark_as_changed(self._name) 164 | -------------------------------------------------------------------------------- /mongoengine/base/proxy.py: -------------------------------------------------------------------------------- 1 | from mongoengine.queryset import OperationError, DoesNotExist 2 | from bson.dbref import DBRef 3 | 4 | class LocalProxy(object): 5 | # From werkzeug/local.py 6 | 7 | """ Forwards all operations to 8 | a proxied object. The only operations not supported for forwarding 9 | are right handed operands and any kind of assignment. 10 | """ 11 | 12 | __slots__ = ('__local', '__dict__', '__name__') 13 | 14 | def __init__(self, local, name=None): 15 | object.__setattr__(self, '_LocalProxy__local', local) 16 | object.__setattr__(self, '__name__', name) 17 | 18 | def _get_current_object(self): 19 | """Return the current object. This is useful if you want the real 20 | object behind the proxy at a time for performance reasons or because 21 | you want to pass the object into a different context. 
22 | """ 23 | if not hasattr(self.__local, '__release_local__'): 24 | return self.__local() 25 | try: 26 | return getattr(self.__local, self.__name__) 27 | except AttributeError: 28 | raise RuntimeError('no object bound to %s' % self.__name__) 29 | 30 | @property 31 | def __dict__(self): 32 | try: 33 | return self._get_current_object().__dict__ 34 | except RuntimeError: 35 | raise AttributeError('__dict__') 36 | 37 | def __repr__(self): 38 | try: 39 | obj = self._get_current_object() 40 | except RuntimeError: 41 | return '<%s unbound>' % self.__class__.__name__ 42 | return repr(obj) 43 | 44 | def __bool__(self): 45 | try: 46 | return bool(self._get_current_object()) 47 | except RuntimeError: 48 | return False 49 | 50 | def __unicode__(self): 51 | try: 52 | return str(self._get_current_object()) 53 | except RuntimeError: 54 | return repr(self) 55 | 56 | def __dir__(self): 57 | try: 58 | return dir(self._get_current_object()) 59 | except RuntimeError: 60 | return [] 61 | 62 | def __getattr__(self, name): 63 | if name == '__members__': 64 | return dir(self._get_current_object()) 65 | return getattr(self._get_current_object(), name) 66 | 67 | def __setitem__(self, key, value): 68 | self._get_current_object()[key] = value 69 | 70 | def __delitem__(self, key): 71 | del self._get_current_object()[key] 72 | 73 | def __setslice__(self, i, j, seq): 74 | self._get_current_object()[i:j] = seq 75 | 76 | def __delslice__(self, i, j): 77 | del self._get_current_object()[i:j] 78 | 79 | __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v) 80 | __delattr__ = lambda x, n: delattr(x._get_current_object(), n) 81 | __str__ = lambda x: str(x._get_current_object()) 82 | __lt__ = lambda x, o: x._get_current_object() < o 83 | __le__ = lambda x, o: x._get_current_object() <= o 84 | __eq__ = lambda x, o: x._get_current_object() == o 85 | __ne__ = lambda x, o: x._get_current_object() != o 86 | __gt__ = lambda x, o: x._get_current_object() > o 87 | __ge__ = lambda x, o: x._get_current_object() >= o 88 | __cmp__ = lambda x, o: cmp(x._get_current_object(), o) 89 | __hash__ = lambda x: hash(x._get_current_object()) 90 | __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw) 91 | __len__ = lambda x: len(x._get_current_object()) 92 | __getitem__ = lambda x, i: x._get_current_object()[i] 93 | __iter__ = lambda x: iter(x._get_current_object()) 94 | __contains__ = lambda x, i: i in x._get_current_object() 95 | __getslice__ = lambda x, i, j: x._get_current_object()[i:j] 96 | __add__ = lambda x, o: x._get_current_object() + o 97 | __sub__ = lambda x, o: x._get_current_object() - o 98 | __mul__ = lambda x, o: x._get_current_object() * o 99 | __floordiv__ = lambda x, o: x._get_current_object() // o 100 | __mod__ = lambda x, o: x._get_current_object() % o 101 | __divmod__ = lambda x, o: x._get_current_object().__divmod__(o) 102 | __pow__ = lambda x, o: x._get_current_object() ** o 103 | __lshift__ = lambda x, o: x._get_current_object() << o 104 | __rshift__ = lambda x, o: x._get_current_object() >> o 105 | __and__ = lambda x, o: x._get_current_object() & o 106 | __xor__ = lambda x, o: x._get_current_object() ^ o 107 | __or__ = lambda x, o: x._get_current_object() | o 108 | __div__ = lambda x, o: x._get_current_object().__div__(o) 109 | __truediv__ = lambda x, o: x._get_current_object().__truediv__(o) 110 | __neg__ = lambda x: -(x._get_current_object()) 111 | __pos__ = lambda x: +(x._get_current_object()) 112 | __abs__ = lambda x: abs(x._get_current_object()) 113 | __invert__ = lambda x: 
~(x._get_current_object()) 114 | __complex__ = lambda x: complex(x._get_current_object()) 115 | __int__ = lambda x: int(x._get_current_object()) 116 | __long__ = lambda x: int(x._get_current_object()) 117 | __float__ = lambda x: float(x._get_current_object()) 118 | __oct__ = lambda x: oct(x._get_current_object()) 119 | __hex__ = lambda x: hex(x._get_current_object()) 120 | __index__ = lambda x: x._get_current_object().__index__() 121 | __coerce__ = lambda x, o: x.__coerce__(x, o) 122 | __enter__ = lambda x: x.__enter__() 123 | __exit__ = lambda x, *a, **kw: x.__exit__(*a, **kw) 124 | 125 | 126 | class DocumentProxy(LocalProxy): 127 | __slots__ = ('__document_type', '__document', '__pk') 128 | 129 | def __init__(self, document_type, pk): 130 | object.__setattr__(self, '_DocumentProxy__document_type', document_type) 131 | object.__setattr__(self, '_DocumentProxy__document', None) 132 | object.__setattr__(self, '_DocumentProxy__pk', pk) 133 | object.__setattr__(self, document_type._meta['id_field'], self.pk) 134 | 135 | @property 136 | def __class__(self): 137 | # We need to fetch the object to determine to which class it belongs. 138 | try: 139 | return self._get_current_object().__class__ 140 | except DoesNotExist: 141 | return DocumentProxy 142 | 143 | def _lazy(): 144 | def fget(self): 145 | return self.__document._lazy if self.__document else True 146 | def fset(self, value): 147 | self._get_current_object()._lazy = value 148 | return property(fget, fset) 149 | _lazy = _lazy() 150 | 151 | # copy normally updates __dict__ which would result in errors 152 | def __setstate__(self, state): 153 | for k, v in state[1].items(): 154 | object.__setattr__(self, k, v) 155 | 156 | def _get_collection_name(self): 157 | return self.__document_type._meta.get('collection', None) 158 | 159 | def __eq__(self, other): 160 | if other and hasattr(other, '_get_collection_name') and other._get_collection_name() == self._get_collection_name() and hasattr(other, 'pk'): 161 | if self.pk == other.pk: 162 | return True 163 | return False 164 | 165 | def __ne__(self, other): 166 | return not self.__eq__(other) 167 | 168 | def to_dbref(self): 169 | """Returns an instance of :class:`~bson.dbref.DBRef` useful in 170 | `__raw__` queries.""" 171 | if not self.pk: 172 | msg = "Only saved documents can have a valid dbref" 173 | raise OperationError(msg) 174 | return DBRef(self._get_collection_name(), self.pk) 175 | 176 | def pk(): 177 | def fget(self): 178 | return self.__document.pk if self.__document else self.__pk 179 | def fset(self, value): 180 | self._get_current_object().pk = value 181 | return property(fget, fset) 182 | pk = pk() 183 | 184 | def _get_current_object(self): 185 | if self.__document == None: 186 | collection = self.__document_type._get_collection() 187 | son = collection.find_one({'_id': self.__pk}) 188 | if son is None: 189 | raise DoesNotExist(f"Document {self.pk} has been deleted.") 190 | document = self.__document_type._from_son(son) 191 | object.__setattr__(self, '_DocumentProxy__document', document) 192 | return self.__document 193 | 194 | def __bool__(self): 195 | return True 196 | -------------------------------------------------------------------------------- /mongoengine/common.py: -------------------------------------------------------------------------------- 1 | _class_registry_cache = {} 2 | 3 | 4 | def _import_class(cls_name): 5 | """Cache mechanism for imports. 6 | 7 | Due to complications of circular imports mongoengine needs to do lots of 8 | inline imports in functions. 
This is inefficient as classes are 9 | imported repeated throughout the mongoengine code. This is 10 | compounded by some recursive functions requiring inline imports. 11 | 12 | :mod:`mongoengine.common` provides a single point to import all these 13 | classes. Circular imports aren't an issue as it dynamically imports the 14 | class when first needed. Subsequent calls to the 15 | :func:`~mongoengine.common._import_class` can then directly retrieve the 16 | class from the :data:`mongoengine.common._class_registry_cache`. 17 | """ 18 | if cls_name in _class_registry_cache: 19 | return _class_registry_cache.get(cls_name) 20 | 21 | doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', 22 | 'MapReduceDocument') 23 | field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', 24 | 'FileField', 'GenericReferenceField', 25 | 'GenericEmbeddedDocumentField', 'GeoPointField', 26 | 'PointField', 'LineStringField', 'PolygonField', 27 | 'ReferenceField', 'StringField', 'ComplexBaseField') 28 | queryset_classes = ('OperationError',) 29 | deref_classes = ('DeReference',) 30 | 31 | if cls_name in doc_classes: 32 | from mongoengine import document as module 33 | import_classes = doc_classes 34 | elif cls_name in field_classes: 35 | from mongoengine import fields as module 36 | import_classes = field_classes 37 | elif cls_name in queryset_classes: 38 | from mongoengine import queryset as module 39 | import_classes = queryset_classes 40 | elif cls_name in deref_classes: 41 | from mongoengine import dereference as module 42 | import_classes = deref_classes 43 | else: 44 | raise ValueError('No import set for: ' % cls_name) 45 | 46 | for cls in import_classes: 47 | _class_registry_cache[cls] = getattr(module, cls) 48 | 49 | return _class_registry_cache.get(cls_name) 50 | -------------------------------------------------------------------------------- /mongoengine/connection.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | import pymongo 3 | from pymongo import MongoClient, ReadPreference, uri_parser 4 | 5 | __all__ = [ 6 | 'DEFAULT_CONNECTION_NAME', 7 | 'ConnectionError', 8 | 'connect', 9 | 'disconnect', 10 | 'get_connection', 11 | 'get_db', 12 | 'register_connection', 13 | ] 14 | 15 | 16 | DEFAULT_CONNECTION_NAME = 'default' 17 | DEFAULT_READ_PREFERENCE = ReadPreference.PRIMARY 18 | 19 | 20 | class ConnectionError(Exception): 21 | pass 22 | 23 | 24 | _connection_settings = {} 25 | _connections = {} 26 | _dbs = {} 27 | 28 | 29 | def register_connection( 30 | alias, 31 | name, 32 | host='localhost', 33 | port=27017, 34 | is_slave=False, 35 | read_preference=DEFAULT_READ_PREFERENCE, 36 | slaves=None, 37 | username=None, 38 | password=None, 39 | uuidrepresentation=None, 40 | **kwargs 41 | ): 42 | """Add a connection. 
43 | 44 | :param alias: the name that will be used to refer to this connection 45 | throughout MongoEngine 46 | :param name: the name of the specific database to use 47 | :param host: the host name of the :program:`mongod` instance to connect to 48 | :param port: the port that the :program:`mongod` instance is running on 49 | :param is_slave: whether the connection can act as a slave 50 | ** Deprecated in PyMongo 2.0.1+ 51 | :param read_preference: The read preference for the collection 52 | ** Added in PyMongo 2.1 53 | :param slaves: a list of aliases of slave connections; each of these must 54 | be a registered connection that has :attr:`is_slave` set to ``True`` 55 | :param username: username to authenticate with 56 | :param password: password to authenticate with 57 | :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver 58 | 59 | """ 60 | global _connection_settings 61 | 62 | conn_settings = { 63 | 'name': name, 64 | 'host': host, 65 | 'port': port, 66 | 'is_slave': is_slave, 67 | 'slaves': slaves or [], 68 | 'username': username, 69 | 'password': password, 70 | 'read_preference': read_preference 71 | } 72 | 73 | # Handle uri style connections 74 | if "://" in host: 75 | uri_dict = uri_parser.parse_uri(host) 76 | if uri_dict.get('database') is None: 77 | raise ConnectionError("If using URI style connection include "\ 78 | "database name in string") 79 | conn_settings.update({ 80 | 'host': host, 81 | 'name': uri_dict.get('database'), 82 | 'username': uri_dict.get('username'), 83 | 'password': uri_dict.get('password'), 84 | 'read_preference': read_preference, 85 | }) 86 | if "replicaSet" in host: 87 | conn_settings['replicaSet'] = True 88 | if "uuidrepresentation" in uri_dict: 89 | uuidrepresentation = uri_dict.get('uuidrepresentation') 90 | 91 | if uuidrepresentation is None: 92 | warnings.warn( 93 | "No uuidrepresentation is specified! Falling back to " 94 | "'pythonLegacy' which is the default for pymongo 3.x. " 95 | "For compatibility with other MongoDB drivers this should be " 96 | "specified as 'standard' or '{java,csharp}Legacy' to work with " 97 | "older drivers in those languages. 
This will be changed to " 98 | "'unspecified' in a future release.", 99 | DeprecationWarning, 100 | stacklevel=3, 101 | ) 102 | uuidrepresentation = "pythonLegacy" 103 | conn_settings['uuidrepresentation'] = uuidrepresentation 104 | conn_settings.update(kwargs) 105 | _connection_settings[alias] = conn_settings 106 | 107 | 108 | def disconnect(alias=DEFAULT_CONNECTION_NAME): 109 | global _connections 110 | global _dbs 111 | 112 | if alias in _connections: 113 | get_connection(alias=alias).close() 114 | del _connections[alias] 115 | if alias in _dbs: 116 | del _dbs[alias] 117 | 118 | 119 | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): 120 | global _connections 121 | # Connect to the database if not already connected 122 | if reconnect: 123 | disconnect(alias) 124 | 125 | if alias not in _connections: 126 | if alias not in _connection_settings: 127 | msg = 'Connection with alias "%s" has not been defined' % alias 128 | if alias == DEFAULT_CONNECTION_NAME: 129 | msg = 'You have not defined a default connection' 130 | raise ConnectionError(msg) 131 | conn_settings = _connection_settings[alias].copy() 132 | 133 | if hasattr(pymongo, 'version_tuple'): # Support for 2.1+ 134 | conn_settings.pop('name', None) 135 | conn_settings.pop('slaves', None) 136 | conn_settings.pop('is_slave', None) 137 | else: 138 | # Get all the slave connections 139 | if 'slaves' in conn_settings: 140 | slaves = [] 141 | for slave_alias in conn_settings['slaves']: 142 | slaves.append(get_connection(slave_alias)) 143 | conn_settings['slaves'] = slaves 144 | conn_settings.pop('read_preference', None) 145 | 146 | if 'replicaSet' in conn_settings: 147 | conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) 148 | # Discard port since it can't be used with replicaSet 149 | conn_settings.pop('port', None) 150 | # Discard replicaSet if not base string 151 | if not isinstance(conn_settings['replicaSet'], str): 152 | conn_settings.pop('replicaSet', None) 153 | 154 | try: 155 | _connections[alias] = MongoClient(**conn_settings) 156 | except Exception as e: 157 | raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) 158 | return _connections[alias] 159 | 160 | 161 | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): 162 | global _dbs 163 | if reconnect: 164 | disconnect(alias) 165 | 166 | if alias not in _dbs: 167 | conn = get_connection(alias) 168 | conn_settings = _connection_settings[alias] 169 | db = conn[conn_settings['name']] 170 | _dbs[alias] = db 171 | return _dbs[alias] 172 | 173 | 174 | def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): 175 | """Connect to the database specified by the 'db' argument. 176 | 177 | Connection settings may be provided here as well if the database is not 178 | running on the default port on localhost. If authentication is needed, 179 | provide username and password arguments as well. 180 | 181 | Multiple databases are supported by using aliases. Provide a separate 182 | `alias` to connect to a different instance of :program:`mongod`. 183 | 184 | .. versionchanged:: 0.6 - added multiple database support. 
185 | """ 186 | global _connections 187 | if alias not in _connections: 188 | register_connection(alias, db, **kwargs) 189 | 190 | return get_connection(alias) 191 | 192 | 193 | # Support old naming convention 194 | _get_connection = get_connection 195 | _get_db = get_db 196 | -------------------------------------------------------------------------------- /mongoengine/context_managers.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | 3 | from pymongo.write_concern import WriteConcern 4 | 5 | from mongoengine.common import _import_class 6 | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db 7 | 8 | 9 | __all__ = ("switch_db", "switch_collection", "no_dereference", 10 | "no_sub_classes", "query_counter") 11 | 12 | 13 | class switch_db(object): 14 | """ switch_db alias context manager. 15 | 16 | Example :: 17 | 18 | # Register connections 19 | register_connection('default', 'mongoenginetest') 20 | register_connection('testdb-1', 'mongoenginetest2') 21 | 22 | class Group(Document): 23 | name = StringField() 24 | 25 | Group(name="test").save() # Saves in the default db 26 | 27 | with switch_db(Group, 'testdb-1') as Group: 28 | Group(name="hello testdb!").save() # Saves in testdb-1 29 | 30 | """ 31 | 32 | def __init__(self, cls, db_alias): 33 | """ Construct the switch_db context manager 34 | 35 | :param cls: the class to change the registered db 36 | :param db_alias: the name of the specific database to use 37 | """ 38 | self.cls = cls 39 | self.collection = cls._get_collection() 40 | self.db_alias = db_alias 41 | self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) 42 | 43 | def __enter__(self): 44 | """ change the db_alias and clear the cached collection """ 45 | self.cls._meta["db_alias"] = self.db_alias 46 | self.cls._collection = None 47 | return self.cls 48 | 49 | def __exit__(self, t, value, traceback): 50 | """ Reset the db_alias and collection """ 51 | self.cls._meta["db_alias"] = self.ori_db_alias 52 | self.cls._collection = self.collection 53 | 54 | 55 | class switch_collection(object): 56 | """ switch_collection alias context manager. 57 | 58 | Example :: 59 | 60 | class Group(Document): 61 | name = StringField() 62 | 63 | Group(name="test").save() # Saves in the default db 64 | 65 | with switch_collection(Group, 'group1') as Group: 66 | Group(name="hello testdb!").save() # Saves in group1 collection 67 | 68 | """ 69 | 70 | def __init__(self, cls, collection_name): 71 | """ Construct the switch_collection context manager 72 | 73 | :param cls: the class to change the registered db 74 | :param collection_name: the name of the collection to use 75 | """ 76 | self.cls = cls 77 | self.ori_collection = cls._get_collection() 78 | self.ori_get_collection_name = cls._get_collection_name 79 | self.collection_name = collection_name 80 | 81 | def __enter__(self): 82 | """ change the _get_collection_name and clear the cached collection """ 83 | 84 | @classmethod 85 | def _get_collection_name(cls): 86 | return self.collection_name 87 | 88 | self.cls._get_collection_name = _get_collection_name 89 | self.cls._collection = None 90 | return self.cls 91 | 92 | def __exit__(self, t, value, traceback): 93 | """ Reset the collection """ 94 | self.cls._collection = self.ori_collection 95 | self.cls._get_collection_name = self.ori_get_collection_name 96 | 97 | 98 | class no_dereference(object): 99 | """ no_dereference context manager. 
100 | 101 | Turns off all dereferencing in Documents for the duration of the context 102 | manager:: 103 | 104 | with no_dereference(Group) as Group: 105 | Group.objects.find() 106 | 107 | """ 108 | 109 | def __init__(self, cls): 110 | """ Construct the no_dereference context manager. 111 | 112 | :param cls: the class to turn dereferencing off on 113 | """ 114 | self.cls = cls 115 | 116 | ReferenceField = _import_class('ReferenceField') 117 | GenericReferenceField = _import_class('GenericReferenceField') 118 | ComplexBaseField = _import_class('ComplexBaseField') 119 | 120 | self.deref_fields = [k for k, v in self.cls._fields.items() 121 | if isinstance(v, (ReferenceField, 122 | GenericReferenceField, 123 | ComplexBaseField))] 124 | 125 | def __enter__(self): 126 | """ change the objects default and _auto_dereference values""" 127 | for field in self.deref_fields: 128 | self.cls._fields[field]._auto_dereference = False 129 | return self.cls 130 | 131 | def __exit__(self, t, value, traceback): 132 | """ Reset the default and _auto_dereference values""" 133 | for field in self.deref_fields: 134 | self.cls._fields[field]._auto_dereference = True 135 | return self.cls 136 | 137 | 138 | class no_sub_classes(object): 139 | """ no_sub_classes context manager. 140 | 141 | Only returns instances of this class and no sub (inherited) classes:: 142 | 143 | with no_sub_classes(Group) as Group: 144 | Group.objects.find() 145 | 146 | """ 147 | 148 | def __init__(self, cls): 149 | """ Construct the no_sub_classes context manager. 150 | 151 | :param cls: the class to turn querying sub classes on 152 | """ 153 | self.cls = cls 154 | 155 | def __enter__(self): 156 | """ change the objects default and _auto_dereference values""" 157 | self.cls._all_subclasses = self.cls._subclasses 158 | self.cls._subclasses = (self.cls,) 159 | return self.cls 160 | 161 | def __exit__(self, t, value, traceback): 162 | """ Reset the default and _auto_dereference values""" 163 | self.cls._subclasses = self.cls._all_subclasses 164 | delattr(self.cls, '_all_subclasses') 165 | return self.cls 166 | 167 | 168 | class query_counter(object): 169 | """ Query_counter context manager to get the number of queries. """ 170 | 171 | def __init__(self): 172 | """ Construct the query_counter. """ 173 | self.counter = 0 174 | self.db = get_db() 175 | 176 | def __enter__(self): 177 | """ On every with block we need to drop the profile collection. """ 178 | self.db.command({"profile": 0}) 179 | self.db.system.profile.drop() 180 | self.db.command({"profile": 2}) 181 | return self 182 | 183 | def __exit__(self, t, value, traceback): 184 | """ Reset the profiling level. """ 185 | self.db.command({"profile": 0}) 186 | 187 | def __eq__(self, value): 188 | """ == Compare querycounter. """ 189 | counter = self._get_count() 190 | return value == counter 191 | 192 | def __ne__(self, value): 193 | """ != Compare querycounter. """ 194 | return not self.__eq__(value) 195 | 196 | def __lt__(self, value): 197 | """ < Compare querycounter. """ 198 | return self._get_count() < value 199 | 200 | def __le__(self, value): 201 | """ <= Compare querycounter. """ 202 | return self._get_count() <= value 203 | 204 | def __gt__(self, value): 205 | """ > Compare querycounter. """ 206 | return self._get_count() > value 207 | 208 | def __ge__(self, value): 209 | """ >= Compare querycounter. """ 210 | return self._get_count() >= value 211 | 212 | def __int__(self): 213 | """ int representation. 
""" 214 | return self._get_count() 215 | 216 | def __repr__(self): 217 | """ repr query_counter as the number of queries. """ 218 | return "%s" % self._get_count() 219 | 220 | def _get_count(self): 221 | """ Get the number of queries. """ 222 | ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} 223 | count = self.db.system.profile.count_documents(filter=ignore_query) - self.counter 224 | self.counter += 1 225 | return count 226 | 227 | 228 | @contextmanager 229 | def set_write_concern(collection, write_concerns): 230 | combined_concerns = dict(list(collection.write_concern.document.items())) 231 | combined_concerns.update(write_concerns) 232 | yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) 233 | 234 | 235 | @contextmanager 236 | def set_read_write_concern(collection, write_concerns, read_concern): 237 | combined_write_concerns = dict(collection.write_concern.document.items()) 238 | combined_write_concerns.update(write_concerns) 239 | 240 | yield collection.with_options(write_concern=WriteConcern(**combined_write_concerns), read_concern=read_concern) 241 | -------------------------------------------------------------------------------- /mongoengine/dereference.py: -------------------------------------------------------------------------------- 1 | from bson import DBRef, SON 2 | 3 | from .base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) 4 | from .fields import (ReferenceField, ListField, DictField, MapField) 5 | from .connection import get_db 6 | from .queryset import QuerySet 7 | from .document import Document 8 | 9 | 10 | class DeReference(object): 11 | 12 | def __call__(self, items, max_depth=1, instance=None, name=None): 13 | """ 14 | Cheaply dereferences the items to a set depth. 15 | Also handles the convertion of complex data types. 16 | 17 | :param items: The iterable (dict, list, queryset) to be dereferenced. 
18 | :param max_depth: The maximum depth to recurse to 19 | :param instance: The owning instance used for tracking changes by 20 | :class:`~mongoengine.base.ComplexBaseField` 21 | :param name: The name of the field, used for tracking changes by 22 | :class:`~mongoengine.base.ComplexBaseField` 23 | :param get: A boolean determining if being called by __get__ 24 | """ 25 | if items is None or isinstance(items, str): 26 | return items 27 | 28 | # cheapest way to convert a queryset to a list 29 | # list(queryset) uses a count() query to determine length 30 | if isinstance(items, QuerySet): 31 | items = [i for i in items] 32 | 33 | self.max_depth = max_depth 34 | doc_type = None 35 | 36 | if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)): 37 | doc_type = instance._fields.get(name) 38 | if hasattr(doc_type, 'field'): 39 | doc_type = doc_type.field 40 | 41 | if isinstance(doc_type, ReferenceField): 42 | field = doc_type 43 | doc_type = doc_type.document_type 44 | is_list = not hasattr(items, 'items') 45 | 46 | if is_list and all([i.__class__ == doc_type for i in items]): 47 | return items 48 | elif not is_list and all([i.__class__ == doc_type 49 | for i in list(items.values())]): 50 | return items 51 | elif not field.dbref: 52 | if not hasattr(items, 'items'): 53 | items = [field.to_python(v) 54 | if not isinstance(v, (DBRef, Document)) else v 55 | for v in items] 56 | else: 57 | items = dict([ 58 | (k, field.to_python(v)) 59 | if not isinstance(v, (DBRef, Document)) else (k, v) 60 | for k, v in items.items()] 61 | ) 62 | 63 | self.reference_map = self._find_references(items) 64 | self.object_map = self._fetch_objects(doc_type=doc_type) 65 | return self._attach_objects(items, 0, instance, name) 66 | 67 | def _find_references(self, items, depth=0): 68 | """ 69 | Recursively finds all db references to be dereferenced 70 | 71 | :param items: The iterable (dict, list, queryset) 72 | :param depth: The current depth of recursion 73 | """ 74 | reference_map = {} 75 | if not items or depth >= self.max_depth: 76 | return reference_map 77 | 78 | # Determine the iterator to use 79 | if not hasattr(items, 'items'): 80 | iterator = enumerate(items) 81 | else: 82 | iterator = iter(items.items()) 83 | 84 | # Recursively find dbreferences 85 | depth += 1 86 | for k, item in iterator: 87 | if isinstance(item, Document): 88 | for field_name, field in item._fields.items(): 89 | v = getattr(item, field_name) 90 | if isinstance(v, (DBRef)): 91 | reference_map.setdefault(field.document_type, []).append(v.id) 92 | elif isinstance(v, Document) and getattr(v, '_lazy', False): 93 | reference_map.setdefault(field.document_type, []).append(v.pk) 94 | elif isinstance(v, (dict, SON)) and '_ref' in v: 95 | reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) 96 | elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: 97 | field_cls = getattr(getattr(field, 'field', None), 'document_type', None) 98 | references = self._find_references(v, depth) 99 | for key, refs in references.items(): 100 | if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): 101 | key = field_cls 102 | reference_map.setdefault(key, []).extend(refs) 103 | elif isinstance(item, (DBRef)): 104 | reference_map.setdefault(item.collection, []).append(item.id) 105 | elif isinstance(item, (dict, SON)) and '_ref' in item: 106 | reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) 107 | elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: 108 | 
references = self._find_references(item, depth - 1) 109 | for key, refs in references.items(): 110 | reference_map.setdefault(key, []).extend(refs) 111 | 112 | return reference_map 113 | 114 | def _fetch_objects(self, doc_type=None): 115 | """Fetch all references and convert to their document objects 116 | """ 117 | object_map = {} 118 | for col, dbrefs in self.reference_map.items(): 119 | keys = list(object_map.keys()) 120 | refs = list(set([dbref for dbref in dbrefs if str(dbref).encode('utf-8') not in keys])) 121 | if hasattr(col, 'objects'): # We have a document class for the refs 122 | references = col.objects.in_bulk(refs) 123 | for key, doc in references.items(): 124 | object_map[key] = doc 125 | else: # Generic reference: use the refs data to convert to document 126 | if isinstance(doc_type, (ListField, DictField, MapField)): 127 | continue 128 | 129 | if doc_type: 130 | references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) 131 | for ref in references: 132 | doc = doc_type._from_son(ref) 133 | object_map[doc.id] = doc 134 | else: 135 | references = get_db()[col].find({'_id': {'$in': refs}}) 136 | for ref in references: 137 | if '_cls' in ref: 138 | doc = get_document(ref["_cls"])._from_son(ref) 139 | elif doc_type is None: 140 | doc = get_document( 141 | ''.join(x.capitalize() 142 | for x in col.split('_')))._from_son(ref) 143 | else: 144 | doc = doc_type._from_son(ref) 145 | object_map[doc.id] = doc 146 | return object_map 147 | 148 | def _attach_objects(self, items, depth=0, instance=None, name=None): 149 | """ 150 | Recursively finds all db references to be dereferenced 151 | 152 | :param items: The iterable (dict, list, queryset) 153 | :param depth: The current depth of recursion 154 | :param instance: The owning instance used for tracking changes by 155 | :class:`~mongoengine.base.ComplexBaseField` 156 | :param name: The name of the field, used for tracking changes by 157 | :class:`~mongoengine.base.ComplexBaseField` 158 | """ 159 | if not items: 160 | if isinstance(items, (BaseDict, BaseList)): 161 | return items 162 | 163 | if instance: 164 | if isinstance(items, dict): 165 | return BaseDict(items, instance, name) 166 | else: 167 | return BaseList(items, instance, name) 168 | 169 | if isinstance(items, (dict, SON)): 170 | if '_ref' in items: 171 | return self.object_map.get(items['_ref'].id, items) 172 | elif '_cls' in items: 173 | doc = get_document(items['_cls'])._from_son(items) 174 | doc._internal_data = self._attach_objects(doc._internal_data, depth, doc, None) 175 | return doc 176 | 177 | if not hasattr(items, 'items'): 178 | is_list = True 179 | as_tuple = isinstance(items, tuple) 180 | iterator = enumerate(items) 181 | data = [] 182 | else: 183 | is_list = False 184 | iterator = iter(items.items()) 185 | data = {} 186 | 187 | depth += 1 188 | for k, v in iterator: 189 | if is_list: 190 | data.append(v) 191 | else: 192 | data[k] = v 193 | 194 | if k in self.object_map and not is_list: 195 | data[k] = self.object_map[k] 196 | elif isinstance(v, Document): 197 | for field_name, field in v._fields.items(): 198 | v = data[k]._internal_data.get(field_name, None) 199 | if isinstance(v, (DBRef)): 200 | data[k]._internal_data[field_name] = self.object_map.get(v.id, v) 201 | elif isinstance(v, Document) and getattr(v, '_lazy', False): 202 | data[k]._internal_data[field_name] = self.object_map.get(v.pk, v) 203 | elif isinstance(v, (dict, SON)) and '_ref' in v: 204 | data[k]._internal_data[field_name] = self.object_map.get(v['_ref'].id, v) 205 | elif isinstance(v, 
dict) and depth <= self.max_depth: 206 | data[k]._internal_data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) 207 | elif isinstance(v, (list, tuple)) and depth <= self.max_depth: 208 | data[k]._internal_data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) 209 | elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: 210 | data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) 211 | elif hasattr(v, 'id'): 212 | data[k] = self.object_map.get(v.id, v) 213 | 214 | if instance and name: 215 | if is_list: 216 | return tuple(data) if as_tuple else BaseList(data, instance, name) 217 | return BaseDict(data, instance, name) 218 | depth += 1 219 | return data 220 | -------------------------------------------------------------------------------- /mongoengine/errors.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | from mongoengine.python_support import txt_type 4 | 5 | 6 | __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', 7 | 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', 8 | 'OperationError', 'NotUniqueError', 'ValidationError') 9 | 10 | 11 | class NotRegistered(Exception): 12 | pass 13 | 14 | 15 | class InvalidDocumentError(Exception): 16 | pass 17 | 18 | 19 | class LookUpError(AttributeError): 20 | pass 21 | 22 | 23 | class DoesNotExist(Exception): 24 | pass 25 | 26 | 27 | class MultipleObjectsReturned(Exception): 28 | pass 29 | 30 | 31 | class InvalidQueryError(Exception): 32 | pass 33 | 34 | 35 | class OperationError(Exception): 36 | pass 37 | 38 | 39 | class NotUniqueError(OperationError): 40 | pass 41 | 42 | 43 | class ValidationError(AssertionError): 44 | """Validation exception. 45 | 46 | May represent an error validating a field or a 47 | document containing fields with validation errors. 48 | 49 | :ivar errors: A dictionary of errors for fields within this 50 | document or list, or None if the error is for an 51 | individual field. 52 | """ 53 | 54 | errors = {} 55 | field_name = None 56 | _message = None 57 | 58 | def __init__(self, message="", **kwargs): 59 | self.errors = kwargs.get('errors', {}) 60 | self.field_name = kwargs.get('field_name') 61 | self.message = message 62 | 63 | def __str__(self): 64 | return txt_type(self.message) 65 | 66 | def __repr__(self): 67 | return '%s(%s,)' % (self.__class__.__name__, self.message) 68 | 69 | def __getattribute__(self, name): 70 | message = super(ValidationError, self).__getattribute__(name) 71 | if name == 'message': 72 | if self.field_name: 73 | message = '%s' % message 74 | if self.errors: 75 | message = '%s(%s)' % (message, self._format_errors()) 76 | return message 77 | 78 | def _get_message(self): 79 | return self._message 80 | 81 | def _set_message(self, message): 82 | self._message = message 83 | 84 | message = property(_get_message, _set_message) 85 | 86 | def to_dict(self): 87 | """Returns a dictionary of all errors within a document 88 | 89 | Keys are field names or list indices and values are the 90 | validation error messages, or a nested dictionary of 91 | errors for an embedded document or list. 
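        An illustrative sketch of the expected shape (the field names below
        are hypothetical, not taken from the source):

            err = ValidationError('root', errors={
                'name': ValidationError('Field is required'),
                'address': ValidationError('bad address', errors={
                    'city': ValidationError('Field is required'),
                }),
            })
            # Nested ValidationErrors collapse into nested dicts; leaf errors
            # become their message strings.
            assert err.to_dict() == {
                'name': 'Field is required',
                'address': {'city': 'Field is required'},
            }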
92 | """ 93 | 94 | def build_dict(source): 95 | errors_dict = {} 96 | if not source: 97 | return errors_dict 98 | if isinstance(source, dict): 99 | for field_name, error in source.items(): 100 | errors_dict[field_name] = build_dict(error) 101 | elif isinstance(source, ValidationError) and source.errors: 102 | return build_dict(source.errors) 103 | else: 104 | return str(source) 105 | return errors_dict 106 | if not self.errors: 107 | return {} 108 | return build_dict(self.errors) 109 | 110 | def _format_errors(self): 111 | """Returns a string listing all errors within a document""" 112 | 113 | def generate_key(value, prefix=''): 114 | if isinstance(value, list): 115 | value = ' '.join([generate_key(k) for k in value]) 116 | if isinstance(value, dict): 117 | value = ' '.join( 118 | [generate_key(v, k) for k, v in value.items()]) 119 | 120 | results = "%s.%s" % (prefix, value) if prefix else value 121 | return results 122 | 123 | error_dict = defaultdict(list) 124 | for k, v in self.to_dict().items(): 125 | error_dict[generate_key(v)].append(k) 126 | return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.items()]) 127 | -------------------------------------------------------------------------------- /mongoengine/pymongo_support.py: -------------------------------------------------------------------------------- 1 | from bson import json_util, binary 2 | 3 | LEGACY_JSON_OPTIONS = json_util.LEGACY_JSON_OPTIONS.with_options( 4 | uuid_representation=binary.UuidRepresentation.PYTHON_LEGACY, 5 | ) 6 | 7 | -------------------------------------------------------------------------------- /mongoengine/python_support.py: -------------------------------------------------------------------------------- 1 | """Helper functions and types to aid with Python 2.5 - 3 support.""" 2 | 3 | import sys 4 | 5 | PY3 = sys.version_info[0] == 3 6 | PY25 = sys.version_info[:2] == (2, 5) 7 | UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 8 | 9 | if PY3: 10 | import codecs 11 | from io import BytesIO as StringIO 12 | # return s converted to binary. 
b('test') should be equivalent to b'test' 13 | def b(s): 14 | return codecs.latin_1_encode(s)[0] 15 | 16 | bin_type = bytes 17 | txt_type = str 18 | else: 19 | try: 20 | from io import StringIO 21 | except ImportError: 22 | from io import StringIO 23 | 24 | # Conversion to binary only necessary in Python 3 25 | def b(s): 26 | return s 27 | 28 | bin_type = str 29 | txt_type = str 30 | 31 | str_types = (bin_type, txt_type) 32 | 33 | if PY25: 34 | def product(*args, **kwds): 35 | pools = list(map(tuple, args)) * kwds.get('repeat', 1) 36 | result = [[]] 37 | for pool in pools: 38 | result = [x + [y] for x in result for y in pool] 39 | for prod in result: 40 | yield tuple(prod) 41 | reduce = reduce 42 | else: 43 | from itertools import product 44 | from functools import reduce 45 | 46 | 47 | # For use with Python 2.5 48 | # converts all keys from unicode to str for d and all nested dictionaries 49 | def to_str_keys_recursive(d): 50 | if isinstance(d, list): 51 | for val in d: 52 | if isinstance(val, (dict, list)): 53 | to_str_keys_recursive(val) 54 | elif isinstance(d, dict): 55 | for key, val in list(d.items()): 56 | if isinstance(val, (dict, list)): 57 | to_str_keys_recursive(val) 58 | if isinstance(key, str): 59 | d[str(key)] = d.pop(key) 60 | else: 61 | raise ValueError("non list/dict parameter not allowed") 62 | -------------------------------------------------------------------------------- /mongoengine/queryset/__init__.py: -------------------------------------------------------------------------------- 1 | from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned, 2 | InvalidQueryError, OperationError, 3 | NotUniqueError) 4 | from mongoengine.queryset.field_list import * 5 | from mongoengine.queryset.manager import * 6 | from mongoengine.queryset.queryset import * 7 | from mongoengine.queryset.transform import * 8 | from mongoengine.queryset.visitor import * 9 | 10 | __all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ + 11 | transform.__all__ + visitor.__all__) 12 | -------------------------------------------------------------------------------- /mongoengine/queryset/field_list.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ('QueryFieldList',) 3 | 4 | 5 | class QueryFieldList(object): 6 | """Object that handles combinations of .only() and .exclude() calls""" 7 | ONLY = 1 8 | EXCLUDE = 0 9 | 10 | def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): 11 | """The QueryFieldList builder 12 | 13 | :param fields: A list of fields used in `.only()` or `.exclude()` 14 | :param value: How to handle the fields; either `ONLY` or `EXCLUDE` 15 | :param always_include: Any fields to always_include eg `_cls` 16 | :param _only_called: Has `.only()` been called? If so its a set of fields 17 | otherwise it performs a union. 
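        An illustrative sketch of how instances combine (the field names are
        hypothetical):

            fields = QueryFieldList(['name', 'age'], _only_called=True)
            fields += QueryFieldList(['email'], _only_called=True)
            # Successive .only() calls union their fields; as_dict() gives the
            # resulting field projection.
            assert fields.as_dict() == {'name': 1, 'age': 1, 'email': 1}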
18 | """ 19 | self.value = value 20 | self.fields = set(fields or []) 21 | self.always_include = set(always_include or []) 22 | self._id = None 23 | self._only_called = _only_called 24 | self.slice = {} 25 | 26 | def __add__(self, f): 27 | if isinstance(f.value, dict): 28 | for field in f.fields: 29 | self.slice[field] = f.value 30 | if not self.fields: 31 | self.fields = f.fields 32 | elif not self.fields: 33 | self.fields = f.fields 34 | self.value = f.value 35 | self.slice = {} 36 | elif self.value is self.ONLY and f.value is self.ONLY: 37 | self._clean_slice() 38 | if self._only_called: 39 | self.fields = self.fields.union(f.fields) 40 | else: 41 | self.fields = f.fields 42 | elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: 43 | self.fields = self.fields.union(f.fields) 44 | self._clean_slice() 45 | elif self.value is self.ONLY and f.value is self.EXCLUDE: 46 | self.fields -= f.fields 47 | self._clean_slice() 48 | elif self.value is self.EXCLUDE and f.value is self.ONLY: 49 | self.value = self.ONLY 50 | self.fields = f.fields - self.fields 51 | self._clean_slice() 52 | 53 | if '_id' in f.fields: 54 | self._id = f.value 55 | 56 | if self.always_include: 57 | if self.value is self.ONLY and self.fields: 58 | self.fields = self.fields.union(self.always_include) 59 | else: 60 | self.fields -= self.always_include 61 | 62 | if getattr(f, '_only_called', False): 63 | self._only_called = True 64 | return self 65 | 66 | def __bool__(self): 67 | return bool(self.fields) 68 | 69 | def as_dict(self): 70 | field_list = dict((field, self.value) for field in self.fields) 71 | if self.slice: 72 | field_list.update(self.slice) 73 | if self._id is not None: 74 | field_list['_id'] = self._id 75 | return field_list 76 | 77 | def reset(self): 78 | self.fields = set([]) 79 | self.slice = {} 80 | self.value = self.ONLY 81 | 82 | def _clean_slice(self): 83 | if self.slice: 84 | for field in set(self.slice.keys()) - self.fields: 85 | del self.slice[field] 86 | -------------------------------------------------------------------------------- /mongoengine/queryset/manager.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from mongoengine.queryset.queryset import QuerySet 3 | 4 | __all__ = ('queryset_manager', 'QuerySetManager') 5 | 6 | 7 | class QuerySetManager(object): 8 | """ 9 | The default QuerySet Manager. 10 | 11 | Custom QuerySet Manager functions can extend this class and users can 12 | add extra queryset functionality. Any custom manager methods must accept a 13 | :class:`~mongoengine.Document` class as its first argument, and a 14 | :class:`~mongoengine.queryset.QuerySet` as its second argument. 15 | 16 | The method function should return a :class:`~mongoengine.queryset.QuerySet` 17 | , probably the same one that was passed in, but modified in some way. 18 | """ 19 | 20 | get_queryset = None 21 | default = QuerySet 22 | 23 | def __init__(self, queryset_func=None): 24 | if queryset_func: 25 | self.get_queryset = queryset_func 26 | 27 | def __get__(self, instance, owner): 28 | """Descriptor for instantiating a new QuerySet object when 29 | Document.objects is accessed. 
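        An illustrative sketch (the ``Person`` document, its field and the
        ``filter`` call are assumptions, and a connection via ``connect()`` is
        presumed):

            class Person(Document):
                name = StringField()

                @queryset_manager
                def active(doc_cls, queryset):
                    # Two-argument manager functions receive the owning class
                    # and a fresh QuerySet; return a (possibly modified) QuerySet.
                    return queryset.filter(name__ne=None)

            # Class-level access goes through this descriptor and yields the
            # QuerySet built above.
            Person.active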
30 | """ 31 | if instance is not None: 32 | # Document class being used rather than a document object 33 | return self 34 | 35 | # owner is the document that contains the QuerySetManager 36 | queryset_class = owner._meta.get('queryset_class', self.default) 37 | queryset = queryset_class(owner, owner._get_collection()) 38 | if self.get_queryset: 39 | arg_count = self.get_queryset.__code__.co_argcount 40 | if arg_count == 1: 41 | queryset = self.get_queryset(queryset) 42 | elif arg_count == 2: 43 | queryset = self.get_queryset(owner, queryset) 44 | else: 45 | queryset = partial(self.get_queryset, owner, queryset) 46 | return queryset 47 | 48 | 49 | def queryset_manager(func): 50 | """Decorator that allows you to define custom QuerySet managers on 51 | :class:`~mongoengine.Document` classes. The manager must be a function that 52 | accepts a :class:`~mongoengine.Document` class as its first argument, and a 53 | :class:`~mongoengine.queryset.QuerySet` as its second argument. The method 54 | function should return a :class:`~mongoengine.queryset.QuerySet`, probably 55 | the same one that was passed in, but modified in some way. 56 | """ 57 | return QuerySetManager(func) 58 | -------------------------------------------------------------------------------- /mongoengine/queryset/visitor.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | from mongoengine.errors import InvalidQueryError 4 | from mongoengine.python_support import product, reduce 5 | 6 | from mongoengine.queryset import transform 7 | 8 | __all__ = ('Q',) 9 | 10 | 11 | class QNodeVisitor(object): 12 | """Base visitor class for visiting Q-object nodes in a query tree. 13 | """ 14 | 15 | def visit_combination(self, combination): 16 | """Called by QCombination objects. 17 | """ 18 | return combination 19 | 20 | def visit_query(self, query): 21 | """Called by (New)Q objects. 22 | """ 23 | return query 24 | 25 | 26 | class DuplicateQueryConditionsError(InvalidQueryError): 27 | pass 28 | 29 | 30 | class SimplificationVisitor(QNodeVisitor): 31 | """Simplifies query trees by combining unnecessary 'and' connection nodes 32 | into a single Q-object. 33 | """ 34 | 35 | def visit_combination(self, combination): 36 | if combination.operation == combination.AND: 37 | # The simplification only applies to 'simple' queries 38 | if all(isinstance(node, Q) for node in combination.children): 39 | queries = [n.query for n in combination.children] 40 | try: 41 | return Q(**self._query_conjunction(queries)) 42 | except DuplicateQueryConditionsError: 43 | # Cannot be simplified 44 | pass 45 | return combination 46 | 47 | def _query_conjunction(self, queries): 48 | """Merges query dicts - effectively &ing them together. 49 | """ 50 | query_ops = set() 51 | combined_query = {} 52 | for query in queries: 53 | ops = set(query.keys()) 54 | # Make sure that the same operation isn't applied more than once 55 | # to a single field 56 | intersection = ops.intersection(query_ops) 57 | if intersection: 58 | raise DuplicateQueryConditionsError() 59 | 60 | query_ops.update(ops) 61 | combined_query.update(copy.deepcopy(query)) 62 | return combined_query 63 | 64 | 65 | class QueryCompilerVisitor(QNodeVisitor): 66 | """Compiles the nodes in a query tree to a PyMongo-compatible query 67 | dictionary. 
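    An illustrative sketch of the compiled output (the ``name`` field and its
    values are hypothetical):

        # An OR over a single field with simple (non-dict) values is collapsed
        # into an $in query:
        #     Q(name='a') | Q(name='b')   ->   {'name': {'$in': ['a', 'b']}}
        # Any other combination compiles to an explicit
        # {'$or': [...]} or {'$and': [...]} document.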
68 | """ 69 | 70 | def __init__(self, document): 71 | self.document = document 72 | 73 | def visit_combination(self, combination): 74 | operator = "$and" 75 | if combination.operation == combination.OR: 76 | keys = set([key for q in combination.children for key in list(q.keys())]) 77 | if len(keys) == 1: 78 | field = keys.pop() 79 | if not field.startswith('$') and not any([isinstance(q[field], dict) for q in combination.children]): 80 | return { 81 | field: { 82 | '$in': [q[field] for q in combination.children if field in q] 83 | } 84 | } 85 | 86 | operator = "$or" 87 | return {operator: combination.children} 88 | 89 | def visit_query(self, query): 90 | return transform.query(self.document, **query.query) 91 | 92 | 93 | class QNode(object): 94 | """Base class for nodes in query trees. 95 | """ 96 | 97 | AND = 0 98 | OR = 1 99 | 100 | def to_query(self, document): 101 | query = self.accept(SimplificationVisitor()) 102 | query = query.accept(QueryCompilerVisitor(document)) 103 | return query 104 | 105 | def accept(self, visitor): 106 | raise NotImplementedError 107 | 108 | def _combine(self, other, operation): 109 | """Combine this node with another node into a QCombination object. 110 | """ 111 | if getattr(other, 'empty', True): 112 | return self 113 | 114 | if self.empty: 115 | return other 116 | 117 | return QCombination(operation, [self, other]) 118 | 119 | @property 120 | def empty(self): 121 | return False 122 | 123 | def __or__(self, other): 124 | return self._combine(other, self.OR) 125 | 126 | def __and__(self, other): 127 | return self._combine(other, self.AND) 128 | 129 | 130 | class QCombination(QNode): 131 | """Represents the combination of several conditions by a given logical 132 | operator. 133 | """ 134 | 135 | def __init__(self, operation, children): 136 | self.operation = operation 137 | self.children = [] 138 | for node in children: 139 | # If the child is a combination of the same type, we can merge its 140 | # children directly into this combinations children 141 | if isinstance(node, QCombination) and node.operation == operation: 142 | self.children += node.children 143 | else: 144 | self.children.append(node) 145 | 146 | def accept(self, visitor): 147 | for i in range(len(self.children)): 148 | if isinstance(self.children[i], QNode): 149 | self.children[i] = self.children[i].accept(visitor) 150 | 151 | return visitor.visit_combination(self) 152 | 153 | @property 154 | def empty(self): 155 | return not bool(self.children) 156 | 157 | 158 | class Q(QNode): 159 | """A simple query object, used in a query tree to build up more complex 160 | query structures. 
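    Illustrative usage (the ``Post`` document and its fields are hypothetical):

        # Nodes combine with | (OR) and & (AND); to_query() compiles the tree
        # into a PyMongo-style query dict for the given document class.
        query = Q(published=True) | Q(author='ross')
        raw = query.to_query(Post)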
161 | """ 162 | 163 | def __init__(self, **query): 164 | # Support for werkzeug.local.LocalProxy 165 | for key, value in list(query.items()): 166 | if hasattr(value, '_get_current_object'): 167 | query[key] = value._get_current_object() 168 | 169 | self.query = query 170 | 171 | def accept(self, visitor): 172 | return visitor.visit_query(self) 173 | 174 | @property 175 | def empty(self): 176 | return not bool(self.query) 177 | -------------------------------------------------------------------------------- /mongoengine/signals.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | __all__ = ['pre_save', 'post_save', 'pre_delete', 'post_delete'] 4 | 5 | signals_available = False 6 | try: 7 | from blinker import Namespace 8 | signals_available = True 9 | except ImportError: 10 | class Namespace(object): 11 | def signal(self, name, doc=None): 12 | return _FakeSignal(name, doc) 13 | 14 | class _FakeSignal(object): 15 | """If blinker is unavailable, create a fake class with the same 16 | interface that allows sending of signals but will fail with an 17 | error on anything else. Instead of doing anything on send, it 18 | will just ignore the arguments and do nothing instead. 19 | """ 20 | 21 | def __init__(self, name, doc=None): 22 | self.name = name 23 | self.__doc__ = doc 24 | 25 | def _fail(self, *args, **kwargs): 26 | raise RuntimeError('signalling support is unavailable ' 27 | 'because the blinker library is ' 28 | 'not installed.') 29 | send = lambda *a, **kw: None 30 | connect = disconnect = has_receivers_for = receivers_for = \ 31 | temporarily_connected_to = _fail 32 | del _fail 33 | 34 | # the namespace for code signals. If you are not mongoengine code, do 35 | # not put signals in here. Create your own namespace instead. 36 | _signals = Namespace() 37 | 38 | pre_save = _signals.signal('pre_save') 39 | post_save = _signals.signal('post_save') 40 | pre_delete = _signals.signal('pre_delete') 41 | post_delete = _signals.signal('post_delete') 42 | pre_bulk_insert = _signals.signal('pre_bulk_insert') 43 | post_bulk_insert = _signals.signal('post_bulk_insert') 44 | -------------------------------------------------------------------------------- /python-mongoengine.spec: -------------------------------------------------------------------------------- 1 | # sitelib for noarch packages, sitearch for others (remove the unneeded one) 2 | %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} 3 | %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} 4 | 5 | %define srcname mongoengine 6 | 7 | Name: python-%{srcname} 8 | Version: 0.8.2 9 | Release: 1%{?dist} 10 | Summary: A Python Document-Object Mapper for working with MongoDB 11 | 12 | Group: Development/Libraries 13 | License: MIT 14 | URL: https://github.com/MongoEngine/mongoengine 15 | Source0: %{srcname}-%{version}.tar.bz2 16 | 17 | BuildRequires: python-devel 18 | BuildRequires: python-setuptools 19 | 20 | Requires: mongodb 21 | Requires: pymongo 22 | Requires: python-blinker 23 | Requires: python-imaging 24 | 25 | 26 | %description 27 | MongoEngine is an ORM-like layer on top of PyMongo. 28 | 29 | %prep 30 | %setup -q -n %{srcname}-%{version} 31 | 32 | 33 | %build 34 | # Remove CFLAGS=... 
for noarch packages (unneeded) 35 | CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build 36 | 37 | 38 | %install 39 | rm -rf $RPM_BUILD_ROOT 40 | %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT 41 | 42 | %clean 43 | rm -rf $RPM_BUILD_ROOT 44 | 45 | %files 46 | %defattr(-,root,root,-) 47 | %doc docs AUTHORS LICENSE README.rst 48 | # For noarch packages: sitelib 49 | %{python_sitelib}/* 50 | # For arch-specific packages: sitearch 51 | # %{python_sitearch}/* 52 | 53 | %changelog 54 | * See: http://docs.mongoengine.org/en/latest/changelog.html -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pymongo==4.2.0 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [nosetests] 2 | verbosity = 3 3 | detailed-errors = 1 4 | #with-coverage = 1 5 | #cover-erase = 1 6 | #cover-html = 1 7 | #cover-html-dir = ../htmlcov 8 | #cover-package = mongoengine 9 | where = tests 10 | #tests = document/__init__.py 11 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | from setuptools import setup, find_packages 4 | 5 | # Hack to silence atexit traceback in newer python versions 6 | try: 7 | import multiprocessing 8 | except ImportError: 9 | pass 10 | 11 | DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \ 12 | 'Mapper for working with MongoDB.' 13 | LONG_DESCRIPTION = None 14 | try: 15 | LONG_DESCRIPTION = open('README.rst').read() 16 | except: 17 | pass 18 | 19 | 20 | def get_version(version_tuple): 21 | if not isinstance(version_tuple[-1], int): 22 | return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] 23 | return '.'.join(map(str, version_tuple)) 24 | 25 | # Dirty hack to get version number from monogengine/__init__.py - we can't 26 | # import it as it depends on PyMongo and PyMongo isn't installed until this 27 | # file is read 28 | init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') 29 | version_line = list([l for l in open(init) if l.startswith('VERSION')])[0] 30 | 31 | VERSION = get_version(eval(version_line.split('=')[-1])) 32 | print(VERSION) 33 | 34 | CLASSIFIERS = [ 35 | 'Development Status :: 4 - Beta', 36 | 'Intended Audience :: Developers', 37 | 'License :: OSI Approved :: MIT License', 38 | 'Operating System :: OS Independent', 39 | 'Programming Language :: Python', 40 | "Programming Language :: Python :: 3", 41 | "Programming Language :: Python :: 3.1", 42 | "Programming Language :: Python :: 3.2", 43 | "Programming Language :: Python :: 3.7", 44 | "Programming Language :: Python :: 3.8", 45 | "Programming Language :: Python :: Implementation :: CPython", 46 | 'Topic :: Database', 47 | 'Topic :: Software Development :: Libraries :: Python Modules', 48 | ] 49 | 50 | extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} 51 | 52 | assert sys.version_info[0] == 3 53 | 54 | setup(name='mongoengine', 55 | version=VERSION, 56 | author='Harry Marr', 57 | author_email='harry.marr@{nospam}gmail.com', 58 | maintainer="Ross Lawley", 59 | maintainer_email="ross.lawley@{nospam}gmail.com", 60 | url='http://mongoengine.org/', 61 | download_url='https://github.com/MongoEngine/mongoengine/tarball/master', 62 | 
license='MIT', 63 | include_package_data=True, 64 | description=DESCRIPTION, 65 | long_description=LONG_DESCRIPTION, 66 | platforms=['any'], 67 | classifiers=CLASSIFIERS, 68 | install_requires=['pymongo>=3.7,<5.0'], 69 | test_suite='nose.collector', 70 | **extra_opts 71 | ) 72 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | pytest 3 | nose 4 | coverage 5 | blinker 6 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/__init__.py -------------------------------------------------------------------------------- /tests/document/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/document/__init__.py -------------------------------------------------------------------------------- /tests/document/test_class_methods.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | sys.path[0:0] = [""] 4 | import unittest 5 | 6 | from mongoengine import * 7 | 8 | from mongoengine.queryset import NULLIFY, PULL 9 | from mongoengine.connection import get_db 10 | 11 | __all__ = ("ClassMethodsTest", ) 12 | 13 | 14 | class ClassMethodsTest(unittest.TestCase): 15 | 16 | def setUp(self): 17 | connect(db='mongoenginetest') 18 | self.db = get_db() 19 | 20 | class Person(Document): 21 | name = StringField() 22 | age = IntField() 23 | 24 | non_field = True 25 | 26 | meta = {"allow_inheritance": True} 27 | 28 | self.Person = Person 29 | 30 | def tearDown(self): 31 | for collection in self.db.list_collection_names(): 32 | if 'system.' in collection: 33 | continue 34 | self.db.drop_collection(collection) 35 | 36 | def test_definition(self): 37 | """Ensure that document may be defined using fields. 38 | """ 39 | self.assertEqual(['age', 'id', 'name'], 40 | sorted(self.Person._fields.keys())) 41 | self.assertEqual(["IntField", "ObjectIdField", "StringField"], 42 | sorted([x.__class__.__name__ for x in 43 | list(self.Person._fields.values())])) 44 | 45 | def test_get_db(self): 46 | """Ensure that get_db returns the expected db. 47 | """ 48 | db = self.Person._get_db() 49 | self.assertEqual(self.db, db) 50 | 51 | def test_get_collection_name(self): 52 | """Ensure that get_collection_name returns the expected collection 53 | name. 54 | """ 55 | collection_name = 'person' 56 | self.assertEqual(collection_name, self.Person._get_collection_name()) 57 | 58 | def test_get_collection(self): 59 | """Ensure that get_collection returns the expected collection. 60 | """ 61 | collection_name = 'person' 62 | collection = self.Person._get_collection() 63 | self.assertEqual(self.db[collection_name], collection) 64 | 65 | def test_drop_collection(self): 66 | """Ensure that the collection may be dropped from the database. 
67 | """ 68 | collection_name = 'person' 69 | self.Person(name='Test').save() 70 | self.assertTrue(collection_name in self.db.list_collection_names()) 71 | 72 | self.Person.drop_collection() 73 | self.assertFalse(collection_name in self.db.list_collection_names()) 74 | 75 | def test_register_delete_rule(self): 76 | """Ensure that register delete rule adds a delete rule to the document 77 | meta. 78 | """ 79 | class Job(Document): 80 | employee = ReferenceField(self.Person) 81 | 82 | self.assertEqual(self.Person._meta.get('delete_rules'), None) 83 | 84 | self.Person.register_delete_rule(Job, 'employee', NULLIFY) 85 | self.assertEqual(self.Person._meta['delete_rules'], 86 | {(Job, 'employee'): NULLIFY}) 87 | 88 | def test_list_indexes_inheritance(self): 89 | """ ensure that all of the indexes are listed regardless of the super- 90 | or sub-class that we call it from 91 | """ 92 | 93 | class BlogPost(Document): 94 | author = StringField() 95 | title = StringField() 96 | description = StringField() 97 | 98 | meta = { 99 | 'allow_inheritance': True 100 | } 101 | 102 | class BlogPostWithTags(BlogPost): 103 | tags = StringField() 104 | 105 | meta = { 106 | 'indexes': [('author', 'tags')] 107 | } 108 | 109 | class BlogPostWithTagsAndExtraText(BlogPostWithTags): 110 | extra_text = StringField() 111 | 112 | meta = { 113 | 'indexes': [('author', 'tags', 'extra_text')] 114 | } 115 | 116 | BlogPost.drop_collection() 117 | 118 | BlogPost.ensure_indexes() 119 | BlogPostWithTags.ensure_indexes() 120 | BlogPostWithTagsAndExtraText.ensure_indexes() 121 | 122 | self.assertEqual(BlogPost.list_indexes(), 123 | BlogPostWithTags.list_indexes()) 124 | self.assertEqual(BlogPost.list_indexes(), 125 | BlogPostWithTagsAndExtraText.list_indexes()) 126 | self.assertEqual(BlogPost.list_indexes(), [ 127 | { 'key': [('_cls', 1), ('author', 1), ('tags', 1)] }, 128 | { 'key': [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)] }, 129 | { 'key': [('_id', 1)] }, 130 | { 'key': [('_cls', 1)] }, 131 | ]) 132 | 133 | def test_register_delete_rule_inherited(self): 134 | 135 | class Vaccine(Document): 136 | name = StringField(required=True) 137 | 138 | meta = {"indexes": ["name"]} 139 | 140 | class Animal(Document): 141 | family = StringField(required=True) 142 | vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) 143 | 144 | meta = {"allow_inheritance": True, "indexes": ["family"]} 145 | 146 | class Cat(Animal): 147 | name = StringField(required=True) 148 | 149 | self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) 150 | self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) 151 | 152 | def test_collection_naming(self): 153 | """Ensure that a collection with a specified name may be used. 
154 | """ 155 | 156 | class DefaultNamingTest(Document): 157 | pass 158 | self.assertEqual('default_naming_test', 159 | DefaultNamingTest._get_collection_name()) 160 | 161 | class CustomNamingTest(Document): 162 | meta = {'collection': 'pimp_my_collection'} 163 | 164 | self.assertEqual('pimp_my_collection', 165 | CustomNamingTest._get_collection_name()) 166 | 167 | class DynamicNamingTest(Document): 168 | meta = {'collection': lambda c: "DYNAMO"} 169 | self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) 170 | 171 | # Use Abstract class to handle backwards compatibility 172 | class BaseDocument(Document): 173 | meta = { 174 | 'abstract': True, 175 | 'collection': lambda c: c.__name__.lower() 176 | } 177 | 178 | class OldNamingConvention(BaseDocument): 179 | pass 180 | self.assertEqual('oldnamingconvention', 181 | OldNamingConvention._get_collection_name()) 182 | 183 | class InheritedAbstractNamingTest(BaseDocument): 184 | meta = {'collection': 'wibble'} 185 | self.assertEqual('wibble', 186 | InheritedAbstractNamingTest._get_collection_name()) 187 | 188 | # Mixin tests 189 | class BaseMixin(object): 190 | meta = { 191 | 'collection': lambda c: c.__name__.lower() 192 | } 193 | 194 | class OldMixinNamingConvention(Document, BaseMixin): 195 | pass 196 | self.assertEqual('oldmixinnamingconvention', 197 | OldMixinNamingConvention._get_collection_name()) 198 | 199 | class BaseMixin(object): 200 | meta = { 201 | 'collection': lambda c: c.__name__.lower() 202 | } 203 | 204 | class BaseDocument(Document, BaseMixin): 205 | meta = {'allow_inheritance': True} 206 | 207 | class MyDocument(BaseDocument): 208 | pass 209 | 210 | self.assertEqual('basedocument', MyDocument._get_collection_name()) 211 | 212 | def test_custom_collection_name_operations(self): 213 | """Ensure that a collection with a specified name is used as expected. 214 | """ 215 | collection_name = 'personCollTest' 216 | 217 | class Person(Document): 218 | name = StringField() 219 | meta = {'collection': collection_name} 220 | 221 | Person(name="Test User").save() 222 | self.assertTrue(collection_name in self.db.list_collection_names()) 223 | 224 | user_obj = self.db[collection_name].find_one() 225 | self.assertEqual(user_obj['name'], "Test User") 226 | 227 | user_obj = Person.objects[0] 228 | self.assertEqual(user_obj.name, "Test User") 229 | 230 | Person.drop_collection() 231 | self.assertFalse(collection_name in self.db.list_collection_names()) 232 | 233 | def test_collection_name_and_primary(self): 234 | """Ensure that a collection with a specified name may be used. 
235 | """ 236 | 237 | class Person(Document): 238 | name = StringField(primary_key=True) 239 | meta = {'collection': 'app'} 240 | 241 | Person(name="Test User").save() 242 | 243 | user_obj = Person.objects.first() 244 | self.assertEqual(user_obj.name, "Test User") 245 | 246 | Person.drop_collection() 247 | 248 | 249 | if __name__ == '__main__': 250 | unittest.main() 251 | -------------------------------------------------------------------------------- /tests/document/test_dynamic.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | sys.path[0:0] = [""] 4 | 5 | from mongoengine import * 6 | from mongoengine.connection import get_db 7 | 8 | __all__ = ("DynamicTest", ) 9 | 10 | 11 | @unittest.skip("DynamicDocument not implemented") 12 | class DynamicTest(unittest.TestCase): 13 | 14 | def setUp(self): 15 | connect(db='mongoenginetest') 16 | self.db = get_db() 17 | 18 | class Person(DynamicDocument): 19 | name = StringField() 20 | meta = {'allow_inheritance': True} 21 | 22 | Person.drop_collection() 23 | 24 | self.Person = Person 25 | 26 | def test_simple_dynamic_document(self): 27 | """Ensures simple dynamic documents are saved correctly""" 28 | 29 | p = self.Person() 30 | p.name = "James" 31 | p.age = 34 32 | 33 | self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", 34 | "age": 34}) 35 | self.assertEqual(list(p.to_mongo().keys()), ["_cls", "name", "age"]) 36 | p.save() 37 | self.assertEqual(list(p.to_mongo().keys()), ["_id", "_cls", "name", "age"]) 38 | 39 | self.assertEqual(self.Person.objects.first().age, 34) 40 | 41 | # Confirm no changes to self.Person 42 | self.assertFalse(hasattr(self.Person, 'age')) 43 | 44 | def test_change_scope_of_variable(self): 45 | """Test changing the scope of a dynamic field has no adverse effects""" 46 | p = self.Person() 47 | p.name = "Dean" 48 | p.misc = 22 49 | p.save() 50 | 51 | p = self.Person.objects.get() 52 | p.misc = {'hello': 'world'} 53 | p.save() 54 | 55 | p = self.Person.objects.get() 56 | self.assertEqual(p.misc, {'hello': 'world'}) 57 | 58 | def test_delete_dynamic_field(self): 59 | """Test deleting a dynamic field works""" 60 | self.Person.drop_collection() 61 | p = self.Person() 62 | p.name = "Dean" 63 | p.misc = 22 64 | p.save() 65 | 66 | p = self.Person.objects.get() 67 | p.misc = {'hello': 'world'} 68 | p.save() 69 | 70 | p = self.Person.objects.get() 71 | self.assertEqual(p.misc, {'hello': 'world'}) 72 | collection = self.db[self.Person._get_collection_name()] 73 | obj = collection.find_one() 74 | self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) 75 | 76 | del(p.misc) 77 | p.save() 78 | 79 | p = self.Person.objects.get() 80 | self.assertFalse(hasattr(p, 'misc')) 81 | 82 | obj = collection.find_one() 83 | self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) 84 | 85 | def test_dynamic_document_queries(self): 86 | """Ensure we can query dynamic fields""" 87 | p = self.Person() 88 | p.name = "Dean" 89 | p.age = 22 90 | p.save() 91 | 92 | self.assertEqual(1, self.Person.objects(age=22).count()) 93 | p = self.Person.objects(age=22) 94 | p = p.get() 95 | self.assertEqual(22, p.age) 96 | 97 | def test_complex_dynamic_document_queries(self): 98 | class Person(DynamicDocument): 99 | name = StringField() 100 | 101 | Person.drop_collection() 102 | 103 | p = Person(name="test") 104 | p.age = "ten" 105 | p.save() 106 | 107 | p1 = Person(name="test1") 108 | p1.age = "less then ten and a half" 109 | p1.save() 110 | 111 | p2 = 
Person(name="test2") 112 | p2.age = 10 113 | p2.save() 114 | 115 | self.assertEqual(Person.objects(age__icontains='ten').count(), 2) 116 | self.assertEqual(Person.objects(age__gte=10).count(), 1) 117 | 118 | def test_complex_data_lookups(self): 119 | """Ensure you can query dynamic document dynamic fields""" 120 | p = self.Person() 121 | p.misc = {'hello': 'world'} 122 | p.save() 123 | 124 | self.assertEqual(1, self.Person.objects(misc__hello='world').count()) 125 | 126 | def test_complex_embedded_document_validation(self): 127 | """Ensure embedded dynamic documents may be validated""" 128 | class Embedded(DynamicEmbeddedDocument): 129 | content = URLField() 130 | 131 | class Doc(DynamicDocument): 132 | pass 133 | 134 | Doc.drop_collection() 135 | doc = Doc() 136 | 137 | embedded_doc_1 = Embedded(content='http://mongoengine.org') 138 | embedded_doc_1.validate() 139 | 140 | embedded_doc_2 = Embedded(content='this is not a url') 141 | self.assertRaises(ValidationError, embedded_doc_2.validate) 142 | 143 | doc.embedded_field_1 = embedded_doc_1 144 | doc.embedded_field_2 = embedded_doc_2 145 | self.assertRaises(ValidationError, doc.validate) 146 | 147 | def test_inheritance(self): 148 | """Ensure that dynamic document plays nice with inheritance""" 149 | class Employee(self.Person): 150 | salary = IntField() 151 | 152 | Employee.drop_collection() 153 | 154 | self.assertTrue('name' in Employee._fields) 155 | self.assertTrue('salary' in Employee._fields) 156 | self.assertEqual(Employee._get_collection_name(), 157 | self.Person._get_collection_name()) 158 | 159 | joe_bloggs = Employee() 160 | joe_bloggs.name = "Joe Bloggs" 161 | joe_bloggs.salary = 10 162 | joe_bloggs.age = 20 163 | joe_bloggs.save() 164 | 165 | self.assertEqual(1, self.Person.objects(age=20).count()) 166 | self.assertEqual(1, Employee.objects(age=20).count()) 167 | 168 | joe_bloggs = self.Person.objects.first() 169 | self.assertTrue(isinstance(joe_bloggs, Employee)) 170 | 171 | def test_embedded_dynamic_document(self): 172 | """Test dynamic embedded documents""" 173 | class Embedded(DynamicEmbeddedDocument): 174 | pass 175 | 176 | class Doc(DynamicDocument): 177 | pass 178 | 179 | Doc.drop_collection() 180 | doc = Doc() 181 | 182 | embedded_1 = Embedded() 183 | embedded_1.string_field = 'hello' 184 | embedded_1.int_field = 1 185 | embedded_1.dict_field = {'hello': 'world'} 186 | embedded_1.list_field = ['1', 2, {'hello': 'world'}] 187 | doc.embedded_field = embedded_1 188 | 189 | self.assertEqual(doc.to_mongo(), { 190 | "embedded_field": { 191 | "_cls": "Embedded", 192 | "string_field": "hello", 193 | "int_field": 1, 194 | "dict_field": {"hello": "world"}, 195 | "list_field": ['1', 2, {'hello': 'world'}] 196 | } 197 | }) 198 | doc.save() 199 | 200 | doc = Doc.objects.first() 201 | self.assertEqual(doc.embedded_field.__class__, Embedded) 202 | self.assertEqual(doc.embedded_field.string_field, "hello") 203 | self.assertEqual(doc.embedded_field.int_field, 1) 204 | self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) 205 | self.assertEqual(doc.embedded_field.list_field, 206 | ['1', 2, {'hello': 'world'}]) 207 | 208 | def test_complex_embedded_documents(self): 209 | """Test complex dynamic embedded documents setups""" 210 | class Embedded(DynamicEmbeddedDocument): 211 | pass 212 | 213 | class Doc(DynamicDocument): 214 | pass 215 | 216 | Doc.drop_collection() 217 | doc = Doc() 218 | 219 | embedded_1 = Embedded() 220 | embedded_1.string_field = 'hello' 221 | embedded_1.int_field = 1 222 | embedded_1.dict_field = 
{'hello': 'world'} 223 | 224 | embedded_2 = Embedded() 225 | embedded_2.string_field = 'hello' 226 | embedded_2.int_field = 1 227 | embedded_2.dict_field = {'hello': 'world'} 228 | embedded_2.list_field = ['1', 2, {'hello': 'world'}] 229 | 230 | embedded_1.list_field = ['1', 2, embedded_2] 231 | doc.embedded_field = embedded_1 232 | 233 | self.assertEqual(doc.to_mongo(), { 234 | "embedded_field": { 235 | "_cls": "Embedded", 236 | "string_field": "hello", 237 | "int_field": 1, 238 | "dict_field": {"hello": "world"}, 239 | "list_field": ['1', 2, 240 | {"_cls": "Embedded", 241 | "string_field": "hello", 242 | "int_field": 1, 243 | "dict_field": {"hello": "world"}, 244 | "list_field": ['1', 2, {'hello': 'world'}]} 245 | ] 246 | } 247 | }) 248 | doc.save() 249 | doc = Doc.objects.first() 250 | self.assertEqual(doc.embedded_field.__class__, Embedded) 251 | self.assertEqual(doc.embedded_field.string_field, "hello") 252 | self.assertEqual(doc.embedded_field.int_field, 1) 253 | self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) 254 | self.assertEqual(doc.embedded_field.list_field[0], '1') 255 | self.assertEqual(doc.embedded_field.list_field[1], 2) 256 | 257 | embedded_field = doc.embedded_field.list_field[2] 258 | 259 | self.assertEqual(embedded_field.__class__, Embedded) 260 | self.assertEqual(embedded_field.string_field, "hello") 261 | self.assertEqual(embedded_field.int_field, 1) 262 | self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) 263 | self.assertEqual(embedded_field.list_field, ['1', 2, 264 | {'hello': 'world'}]) 265 | 266 | def test_dynamic_and_embedded(self): 267 | """Ensure embedded documents play nicely""" 268 | 269 | class Address(EmbeddedDocument): 270 | city = StringField() 271 | 272 | class Person(DynamicDocument): 273 | name = StringField() 274 | 275 | Person.drop_collection() 276 | 277 | Person(name="Ross", address=Address(city="London")).save() 278 | 279 | person = Person.objects.first() 280 | person.address.city = "Lundenne" 281 | person.save() 282 | 283 | self.assertEqual(Person.objects.first().address.city, "Lundenne") 284 | 285 | person = Person.objects.first() 286 | person.address = Address(city="Londinium") 287 | person.save() 288 | 289 | self.assertEqual(Person.objects.first().address.city, "Londinium") 290 | 291 | person = Person.objects.first() 292 | person.age = 35 293 | person.save() 294 | self.assertEqual(Person.objects.first().age, 35) 295 | 296 | 297 | if __name__ == '__main__': 298 | unittest.main() 299 | -------------------------------------------------------------------------------- /tests/document/test_json_serialisation.py: -------------------------------------------------------------------------------- 1 | import sys 2 | sys.path[0:0] = [""] 3 | 4 | import unittest 5 | import uuid 6 | 7 | from nose.plugins.skip import SkipTest 8 | from datetime import datetime 9 | from bson import ObjectId 10 | 11 | import pymongo 12 | 13 | from mongoengine import * 14 | 15 | __all__ = ("TestJson",) 16 | 17 | 18 | class TestJson(unittest.TestCase): 19 | 20 | def setUp(self): 21 | connect(db='mongoenginetest') 22 | 23 | def test_json_simple(self): 24 | 25 | class Embedded(EmbeddedDocument): 26 | string = StringField() 27 | 28 | class Doc(Document): 29 | string = StringField() 30 | embedded_field = EmbeddedDocumentField(Embedded) 31 | 32 | doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) 33 | 34 | self.assertEqual(doc, Doc.from_json(doc.to_json())) 35 | 36 | def test_json_complex(self): 37 | 38 | if pymongo.version_tuple[0] <= 2 and 
pymongo.version_tuple[1] <= 3: 39 | raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") 40 | 41 | class EmbeddedDoc(EmbeddedDocument): 42 | pass 43 | 44 | class Simple(Document): 45 | pass 46 | 47 | class Doc(Document): 48 | string_field = StringField(default='1') 49 | int_field = IntField(default=1) 50 | float_field = FloatField(default=1.1) 51 | boolean_field = BooleanField(default=True) 52 | datetime_field = DateTimeField(default=datetime.now) 53 | embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, 54 | default=lambda: EmbeddedDoc()) 55 | list_field = ListField(default=lambda: [1, 2, 3]) 56 | dict_field = DictField(default=lambda: {"hello": "world"}) 57 | objectid_field = ObjectIdField(default=ObjectId) 58 | reference_field = ReferenceField(Simple, default=lambda: 59 | Simple().save()) 60 | map_field = MapField(IntField(), default=lambda: {"simple": 1}) 61 | complex_datetime_field = ComplexDateTimeField(default=datetime.now) 62 | url_field = URLField(default="http://mongoengine.org") 63 | dynamic_field = DynamicField(default=1) 64 | generic_reference_field = GenericReferenceField( 65 | default=lambda: Simple().save()) 66 | sorted_list_field = SortedListField(IntField(), 67 | default=lambda: [1, 2, 3]) 68 | email_field = EmailField(default="ross@example.com") 69 | geo_point_field = GeoPointField(default=lambda: [1, 2]) 70 | sequence_field = SequenceField() 71 | uuid_field = UUIDField(default=uuid.uuid4) 72 | generic_embedded_document_field = GenericEmbeddedDocumentField( 73 | default=lambda: EmbeddedDoc()) 74 | 75 | doc = Doc() 76 | self.assertEqual(doc, Doc.from_json(doc.to_json())) 77 | 78 | 79 | if __name__ == '__main__': 80 | unittest.main() 81 | -------------------------------------------------------------------------------- /tests/document/test_validation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | sys.path[0:0] = [""] 4 | 5 | import unittest 6 | from datetime import datetime 7 | 8 | from mongoengine import * 9 | 10 | __all__ = ("ValidatorErrorTest",) 11 | 12 | 13 | class ValidatorErrorTest(unittest.TestCase): 14 | 15 | def setUp(self): 16 | connect(db='mongoenginetest') 17 | 18 | def test_to_dict(self): 19 | """Ensure a ValidationError handles error to_dict correctly. 
20 | """ 21 | error = ValidationError('root') 22 | self.assertEqual(error.to_dict(), {}) 23 | 24 | # 1st level error schema 25 | error.errors = {'1st': ValidationError('bad 1st'), } 26 | self.assertTrue('1st' in error.to_dict()) 27 | self.assertEqual(error.to_dict()['1st'], 'bad 1st') 28 | 29 | # 2nd level error schema 30 | error.errors = {'1st': ValidationError('bad 1st', errors={ 31 | '2nd': ValidationError('bad 2nd'), 32 | })} 33 | self.assertTrue('1st' in error.to_dict()) 34 | self.assertTrue(isinstance(error.to_dict()['1st'], dict)) 35 | self.assertTrue('2nd' in error.to_dict()['1st']) 36 | self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') 37 | 38 | # moar levels 39 | error.errors = {'1st': ValidationError('bad 1st', errors={ 40 | '2nd': ValidationError('bad 2nd', errors={ 41 | '3rd': ValidationError('bad 3rd', errors={ 42 | '4th': ValidationError('Inception'), 43 | }), 44 | }), 45 | })} 46 | self.assertTrue('1st' in error.to_dict()) 47 | self.assertTrue('2nd' in error.to_dict()['1st']) 48 | self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) 49 | self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) 50 | self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], 51 | 'Inception') 52 | 53 | self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") 54 | 55 | def test_model_validation(self): 56 | class User(Document): 57 | username = StringField(primary_key=True) 58 | name = StringField(required=True) 59 | 60 | User.drop_collection() 61 | 62 | try: 63 | User().validate() 64 | except ValidationError as e: 65 | self.assertTrue("User:None" in e.message) 66 | self.assertEqual(e.to_dict(), { 67 | 'username': 'Field is required', 68 | 'name': 'Field is required'}) 69 | 70 | user = User(username="RossC0", name="Ross").save() 71 | user.name = None 72 | try: 73 | user.save() 74 | except ValidationError as e: 75 | self.assertTrue("User:RossC0" in e.message) 76 | self.assertEqual(e.to_dict(), { 77 | 'name': 'Field is required'}) 78 | 79 | def test_fields_rewrite(self): 80 | class BasePerson(Document): 81 | name = StringField() 82 | age = IntField() 83 | meta = {'abstract': True} 84 | 85 | class Person(BasePerson): 86 | name = StringField(required=True) 87 | 88 | p = Person(age=15) 89 | self.assertRaises(ValidationError, p.validate) 90 | 91 | def test_datetime_validation(self): 92 | class DTDoc(Document): 93 | date = DateTimeField() 94 | 95 | dtd = DTDoc() 96 | dtd.date = 'whatever' 97 | self.assertRaises(ValidationError, dtd.save) 98 | 99 | # make sure that passing a parsable datetime works 100 | dtd = DTDoc() 101 | dtd.date = str(datetime.utcnow()) 102 | dtd.save() 103 | dtd.reload() 104 | self.assertTrue(isinstance(dtd.date, datetime)) 105 | 106 | def test_embedded_document_validation(self): 107 | """Ensure that embedded documents may be validated. 
108 | """ 109 | class Comment(EmbeddedDocument): 110 | date = DateTimeField() 111 | content = StringField(required=True) 112 | 113 | comment = Comment() 114 | self.assertRaises(ValidationError, comment.validate) 115 | 116 | comment.content = 'test' 117 | comment.validate() 118 | 119 | comment.date = 4 120 | self.assertRaises(ValidationError, comment.validate) 121 | 122 | comment.date = datetime.now() 123 | comment.validate() 124 | self.assertEqual(comment._instance, None) 125 | 126 | def test_embedded_db_field_validate(self): 127 | 128 | class SubDoc(EmbeddedDocument): 129 | val = IntField(required=True) 130 | 131 | class Doc(Document): 132 | id = StringField(primary_key=True) 133 | e = EmbeddedDocumentField(SubDoc, db_field='eb') 134 | 135 | try: 136 | Doc(id="bad").validate() 137 | except ValidationError as e: 138 | self.assertTrue("SubDoc:None" in e.message) 139 | self.assertEqual(e.to_dict(), { 140 | "e": {'val': 'OK could not be converted to int'}}) 141 | 142 | Doc.drop_collection() 143 | 144 | Doc(id="test", e=SubDoc(val=15)).save() 145 | 146 | doc = Doc.objects.first() 147 | keys = list(doc.to_dict().keys()) 148 | self.assertEqual(2, len(keys)) 149 | self.assertTrue('e' in keys) 150 | self.assertTrue('id' in keys) 151 | 152 | with self.assertRaises(ValueError): 153 | doc.e.val = "OK" 154 | 155 | 156 | if __name__ == '__main__': 157 | unittest.main() 158 | -------------------------------------------------------------------------------- /tests/fields/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/fields/__init__.py -------------------------------------------------------------------------------- /tests/fields/mongodb_leaf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/fields/mongodb_leaf.png -------------------------------------------------------------------------------- /tests/fields/mongoengine.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/fields/mongoengine.png -------------------------------------------------------------------------------- /tests/fields/test_geo.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | sys.path[0:0] = [""] 4 | 5 | import unittest 6 | 7 | from mongoengine import * 8 | from mongoengine.connection import get_db 9 | 10 | __all__ = ("GeoFieldTest", ) 11 | 12 | 13 | @unittest.skip("geo fields not implemented") 14 | class GeoFieldTest(unittest.TestCase): 15 | 16 | def setUp(self): 17 | connect(db='mongoenginetest') 18 | self.db = get_db() 19 | 20 | def _test_for_expected_error(self, Cls, loc, expected): 21 | try: 22 | Cls(loc=loc).validate() 23 | self.fail() 24 | except ValidationError as e: 25 | self.assertEqual(expected, e.to_dict()['loc']) 26 | 27 | def test_geopoint_validation(self): 28 | class Location(Document): 29 | loc = GeoPointField() 30 | 31 | invalid_coords = [{"x": 1, "y": 2}, 5, "a"] 32 | expected = 'GeoPointField can only accept tuples or lists of (x, y)' 33 | 34 | for coord in invalid_coords: 35 | self._test_for_expected_error(Location, coord, expected) 36 | 37 | invalid_coords = [[], [1], [1, 2, 3]] 38 | for coord in invalid_coords: 39 | 
expected = "Value (%s) must be a two-dimensional point" % repr(coord) 40 | self._test_for_expected_error(Location, coord, expected) 41 | 42 | invalid_coords = [[{}, {}], ("a", "b")] 43 | for coord in invalid_coords: 44 | expected = "Both values (%s) in point must be float or int" % repr(coord) 45 | self._test_for_expected_error(Location, coord, expected) 46 | 47 | def test_point_validation(self): 48 | class Location(Document): 49 | loc = PointField() 50 | 51 | invalid_coords = {"x": 1, "y": 2} 52 | expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' 53 | self._test_for_expected_error(Location, invalid_coords, expected) 54 | 55 | invalid_coords = {"type": "MadeUp", "coordinates": []} 56 | expected = 'PointField type must be "Point"' 57 | self._test_for_expected_error(Location, invalid_coords, expected) 58 | 59 | invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]} 60 | expected = "Value ([1, 2, 3]) must be a two-dimensional point" 61 | self._test_for_expected_error(Location, invalid_coords, expected) 62 | 63 | invalid_coords = [5, "a"] 64 | expected = "PointField can only accept lists of [x, y]" 65 | for coord in invalid_coords: 66 | self._test_for_expected_error(Location, coord, expected) 67 | 68 | invalid_coords = [[], [1], [1, 2, 3]] 69 | for coord in invalid_coords: 70 | expected = "Value (%s) must be a two-dimensional point" % repr(coord) 71 | self._test_for_expected_error(Location, coord, expected) 72 | 73 | invalid_coords = [[{}, {}], ("a", "b")] 74 | for coord in invalid_coords: 75 | expected = "Both values (%s) in point must be float or int" % repr(coord) 76 | self._test_for_expected_error(Location, coord, expected) 77 | 78 | Location(loc=[1, 2]).validate() 79 | 80 | def test_linestring_validation(self): 81 | class Location(Document): 82 | loc = LineStringField() 83 | 84 | invalid_coords = {"x": 1, "y": 2} 85 | expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' 86 | self._test_for_expected_error(Location, invalid_coords, expected) 87 | 88 | invalid_coords = {"type": "MadeUp", "coordinates": [[]]} 89 | expected = 'LineStringField type must be "LineString"' 90 | self._test_for_expected_error(Location, invalid_coords, expected) 91 | 92 | invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} 93 | expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" 94 | self._test_for_expected_error(Location, invalid_coords, expected) 95 | 96 | invalid_coords = [5, "a"] 97 | expected = "Invalid LineString must contain at least one valid point" 98 | self._test_for_expected_error(Location, invalid_coords, expected) 99 | 100 | invalid_coords = [[1]] 101 | expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) 102 | self._test_for_expected_error(Location, invalid_coords, expected) 103 | 104 | invalid_coords = [[1, 2, 3]] 105 | expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) 106 | self._test_for_expected_error(Location, invalid_coords, expected) 107 | 108 | invalid_coords = [[[{}, {}]], [("a", "b")]] 109 | for coord in invalid_coords: 110 | expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) 111 | self._test_for_expected_error(Location, coord, expected) 112 | 113 | Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate() 114 | 115 | def test_polygon_validation(self): 116 | class Location(Document): 117 | loc = PolygonField() 118 | 119 | 
invalid_coords = {"x": 1, "y": 2} 120 | expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' 121 | self._test_for_expected_error(Location, invalid_coords, expected) 122 | 123 | invalid_coords = {"type": "MadeUp", "coordinates": [[]]} 124 | expected = 'PolygonField type must be "Polygon"' 125 | self._test_for_expected_error(Location, invalid_coords, expected) 126 | 127 | invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]} 128 | expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" 129 | self._test_for_expected_error(Location, invalid_coords, expected) 130 | 131 | invalid_coords = [[[5, "a"]]] 132 | expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" 133 | self._test_for_expected_error(Location, invalid_coords, expected) 134 | 135 | invalid_coords = [[[]]] 136 | expected = "Invalid Polygon must contain at least one valid linestring" 137 | self._test_for_expected_error(Location, invalid_coords, expected) 138 | 139 | invalid_coords = [[[1, 2, 3]]] 140 | expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point" 141 | self._test_for_expected_error(Location, invalid_coords, expected) 142 | 143 | invalid_coords = [[[{}, {}]], [("a", "b")]] 144 | expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int" 145 | self._test_for_expected_error(Location, invalid_coords, expected) 146 | 147 | invalid_coords = [[[1, 2], [3, 4]]] 148 | expected = "Invalid Polygon:\nLineStrings must start and end at the same point" 149 | self._test_for_expected_error(Location, invalid_coords, expected) 150 | 151 | Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() 152 | 153 | def test_indexes_geopoint(self): 154 | """Ensure that indexes are created automatically for GeoPointFields. 155 | """ 156 | class Event(Document): 157 | title = StringField() 158 | location = GeoPointField() 159 | 160 | geo_indices = Event._geo_indices() 161 | self.assertEqual(geo_indices, [{'fields': [('location', '2d')]}]) 162 | 163 | def test_geopoint_embedded_indexes(self): 164 | """Ensure that indexes are created automatically for GeoPointFields on 165 | embedded documents. 166 | """ 167 | class Venue(EmbeddedDocument): 168 | location = GeoPointField() 169 | name = StringField() 170 | 171 | class Event(Document): 172 | title = StringField() 173 | venue = EmbeddedDocumentField(Venue) 174 | 175 | geo_indices = Event._geo_indices() 176 | self.assertEqual(geo_indices, [{'fields': [('venue.location', '2d')]}]) 177 | 178 | def test_indexes_2dsphere(self): 179 | """Ensure that 2dsphere indexes are created automatically for PointFields, LineStringFields and PolygonFields. 180 | """ 181 | class Event(Document): 182 | title = StringField() 183 | point = PointField() 184 | line = LineStringField() 185 | polygon = PolygonField() 186 | 187 | geo_indices = Event._geo_indices() 188 | self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indices) 189 | self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indices) 190 | self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indices) 191 | 192 | def test_indexes_2dsphere_embedded(self): 193 | """Ensure that 2dsphere indexes are created automatically for PointFields, LineStringFields and PolygonFields on embedded documents. 194 | """ 195 | class Venue(EmbeddedDocument): 196 | name = StringField() 197 | point = PointField() 198 | line = LineStringField() 199 | polygon = PolygonField() 200 | 201 | class Event(Document): 202 | title = StringField() 203 | venue = EmbeddedDocumentField(Venue) 204 | 205 | geo_indices = Event._geo_indices() 206 | self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indices) 207 | self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indices) 208 | self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indices) 209 | 210 | def test_geo_indexes_recursion(self): 211 | 212 | class Location(Document): 213 | name = StringField() 214 | location = GeoPointField() 215 | 216 | class Parent(Document): 217 | name = StringField() 218 | location = ReferenceField(Location) 219 | 220 | Location.drop_collection() 221 | Parent.drop_collection() 222 | 223 | list(Parent.objects) 224 | 225 | collection = Parent._get_collection() 226 | info = collection.index_information() 227 | 228 | self.assertFalse('location_2d' in info) 229 | 230 | self.assertEqual(len(Parent._geo_indices()), 0) 231 | self.assertEqual(len(Location._geo_indices()), 1) 232 | 233 | def test_geo_indexes_auto_index(self): 234 | 235 | # Test specifying the index as a plain list of field/direction pairs 236 | class Log(Document): 237 | location = PointField(auto_index=False) 238 | datetime = DateTimeField() 239 | 240 | meta = { 241 | 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] 242 | } 243 | 244 | self.assertEqual([], Log._geo_indices()) 245 | 246 | Log.drop_collection() 247 | Log.ensure_indexes() 248 | 249 | info = Log._get_collection().index_information() 250 | self.assertEqual(info["location_2dsphere_datetime_1"]["key"], 251 | [('location', '2dsphere'), ('datetime', 1)]) 252 | 253 | # Test specifying the index explicitly as a dict with 'fields' 254 | class Log(Document): 255 | location = PointField(auto_index=False) 256 | datetime = DateTimeField() 257 | 258 | meta = { 259 | 'indexes': [ 260 | {'fields': [("location", "2dsphere"), ("datetime", 1)]} 261 | ] 262 | } 263 | 264 | self.assertEqual([], Log._geo_indices()) 265 | 266 | Log.drop_collection() 267 | Log.ensure_indexes() 268 | 269 | info = Log._get_collection().index_information() 270 | self.assertEqual(info["location_2dsphere_datetime_1"]["key"], 271 | [('location', '2dsphere'), ('datetime', 1)]) 272 | 273 | 274 | if __name__ == '__main__': 275 | unittest.main() 276 | -------------------------------------------------------------------------------- /tests/fixtures.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from datetime import datetime 3 | 4 | from mongoengine import * 5 | from mongoengine import signals 6 | 7 | 8 | class PickleEmbedded(EmbeddedDocument): 9 | date = DateTimeField(default=datetime.now) 10 | 11 | 12 | class PickleTest(Document): 13 | number = IntField() 14 | string = StringField(choices=(('One', '1'), ('Two', '2'))) 15 | embedded = EmbeddedDocumentField(PickleEmbedded) 16 | lists = ListField(StringField()) 17 | photo = FileField() 18 | 19 | 20 | class PickleSignalsTest(Document): 21 | number = IntField() 22 | string = StringField(choices=(('One', '1'), ('Two', '2'))) 23 | embedded = EmbeddedDocumentField(PickleEmbedded) 24 | lists = ListField(StringField()) 25 | 26 | @classmethod 27 | def post_save(cls, sender, document, created, **kwargs): 28 | pickled = pickle.dumps(document) 29 | 30 | @classmethod 31 | def post_delete(cls, sender, document, **kwargs): 32 | pickled = pickle.dumps(document) 33 | 34 | 
signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) 35 | signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) 36 | 37 | 38 | class Mixin(object): 39 | name = StringField() 40 | 41 | 42 | class Base(Document): 43 | meta = {'allow_inheritance': True} 44 | -------------------------------------------------------------------------------- /tests/queryset/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/mongoengine/41d1bf8cc0d74170bf4a7669da08aa77dfbc11b1/tests/queryset/__init__.py -------------------------------------------------------------------------------- /tests/queryset/test_transform.py: -------------------------------------------------------------------------------- 1 | import sys 2 | sys.path[0:0] = [""] 3 | 4 | import unittest 5 | 6 | from mongoengine import * 7 | from mongoengine.queryset import Q 8 | from mongoengine.queryset import transform 9 | 10 | __all__ = ("TransformTest",) 11 | 12 | 13 | class TransformTest(unittest.TestCase): 14 | 15 | def setUp(self): 16 | connect(db='mongoenginetest') 17 | 18 | def test_transform_query(self): 19 | """Ensure that the _transform_query function operates correctly. 20 | """ 21 | self.assertEqual(transform.query(name='test', age=30), 22 | {'name': 'test', 'age': 30}) 23 | self.assertEqual(transform.query(age__lt=30), 24 | {'age': {'$lt': 30}}) 25 | self.assertEqual(transform.query(age__gt=20, age__lt=50), 26 | {'age': {'$gt': 20, '$lt': 50}}) 27 | self.assertEqual(transform.query(age=20, age__gt=50), 28 | {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}) 29 | self.assertEqual(transform.query(friend__age__gte=30), 30 | {'friend.age': {'$gte': 30}}) 31 | self.assertEqual(transform.query(name__exists=True), 32 | {'name': {'$exists': True}}) 33 | 34 | def test_query_field_name(self): 35 | """Ensure that the correct field name is used when querying. 
36 | """ 37 | class Comment(EmbeddedDocument): 38 | content = StringField(db_field='commentContent') 39 | 40 | class BlogPost(Document): 41 | title = StringField(db_field='postTitle') 42 | comments = ListField(EmbeddedDocumentField(Comment), 43 | db_field='postComments') 44 | 45 | BlogPost.drop_collection() 46 | 47 | data = {'title': 'Post 1', 'comments': [Comment(content='test')]} 48 | post = BlogPost(**data) 49 | post.save() 50 | 51 | self.assertTrue('postTitle' in 52 | BlogPost.objects(title=data['title'])._query) 53 | self.assertFalse('title' in 54 | BlogPost.objects(title=data['title'])._query) 55 | self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) 56 | 57 | self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) 58 | self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) 59 | 60 | self.assertTrue('postComments.commentContent' in 61 | BlogPost.objects(comments__content='test')._query) 62 | self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) 63 | 64 | BlogPost.drop_collection() 65 | 66 | @unittest.skip("unsupported") 67 | def test_query_pk_field_name(self): 68 | """Ensure that the correct "primary key" field name is used when 69 | querying 70 | """ 71 | class BlogPost(Document): 72 | title = StringField(primary_key=True, db_field='postTitle') 73 | 74 | BlogPost.drop_collection() 75 | 76 | data = {'title': 'Post 1'} 77 | post = BlogPost(**data) 78 | post.save() 79 | 80 | self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) 81 | self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) 82 | self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) 83 | 84 | BlogPost.drop_collection() 85 | 86 | def test_chaining(self): 87 | class A(Document): 88 | pass 89 | 90 | class B(Document): 91 | a = ReferenceField(A) 92 | 93 | A.drop_collection() 94 | B.drop_collection() 95 | 96 | a1 = A().save() 97 | a2 = A().save() 98 | 99 | B(a=a1).save() 100 | 101 | # Works 102 | q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query 103 | 104 | # Doesn't work 105 | q2 = B.objects.filter(a__in=[a1, a2]) 106 | q2 = q2.filter(a=a1)._query 107 | 108 | self.assertEqual(q1, q2) 109 | 110 | def test_raw_query_and_Q_objects(self): 111 | """ 112 | Test raw plays nicely 113 | """ 114 | class Foo(Document): 115 | name = StringField() 116 | a = StringField() 117 | b = StringField() 118 | c = StringField() 119 | 120 | meta = { 121 | 'allow_inheritance': False 122 | } 123 | 124 | query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query 125 | self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) 126 | 127 | q1 = {'$or': [{'a': 1}, {'b': 1}]} 128 | query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query 129 | self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) 130 | 131 | def test_raw_and_merging(self): 132 | class Doc(Document): 133 | meta = {'allow_inheritance': False} 134 | 135 | raw_query = Doc.objects(__raw__={'deleted': False, 136 | 'scraped': 'yes', 137 | '$nor': [{'views.extracted': 'no'}, 138 | {'attachments.views.extracted':'no'}] 139 | })._query 140 | 141 | expected = {'deleted': False, 'scraped': 'yes', 142 | '$nor': [{'views.extracted': 'no'}, 143 | {'attachments.views.extracted': 'no'}]} 144 | self.assertEqual(expected, raw_query) 145 | 146 | 147 | if __name__ == '__main__': 148 | unittest.main() 149 | -------------------------------------------------------------------------------- /tests/test_all_warnings.py: -------------------------------------------------------------------------------- 1 | """ 2 | This test has been put 
into a module. This is because it tests warnings that 3 | only get triggered on first hit. This way we can ensure its imported into the 4 | top level and called first by the test suite. 5 | """ 6 | import sys 7 | sys.path[0:0] = [""] 8 | import unittest 9 | import warnings 10 | 11 | from mongoengine import * 12 | 13 | 14 | __all__ = ('AllWarnings', ) 15 | 16 | 17 | class AllWarnings(unittest.TestCase): 18 | 19 | def setUp(self): 20 | connect(db='mongoenginetest') 21 | self.warning_list = [] 22 | self.showwarning_default = warnings.showwarning 23 | warnings.showwarning = self.append_to_warning_list 24 | 25 | def append_to_warning_list(self, message, category, *args): 26 | self.warning_list.append({"message": message, 27 | "category": category}) 28 | 29 | def tearDown(self): 30 | # restore default handling of warnings 31 | warnings.showwarning = self.showwarning_default 32 | 33 | def test_document_collection_syntax_warning(self): 34 | 35 | class NonAbstractBase(Document): 36 | meta = {'allow_inheritance': True} 37 | 38 | class InheritedDocumentFailTest(NonAbstractBase): 39 | meta = {'collection': 'fail'} 40 | 41 | warning = self.warning_list[0] 42 | self.assertEqual(SyntaxWarning, warning["category"]) 43 | self.assertEqual('non_abstract_base', 44 | InheritedDocumentFailTest._get_collection_name()) 45 | -------------------------------------------------------------------------------- /tests/test_benchmark.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from timeit import repeat 3 | 4 | from mongoengine import * 5 | 6 | conn_settings = { 7 | "db": "mongoenginetest", 8 | } 9 | 10 | connect(**conn_settings) 11 | 12 | def timeit(f, n=10000): 13 | return min(repeat(f, repeat=3, number=n))/float(n) 14 | 15 | class BenchmarkTestCase(unittest.TestCase): 16 | def setUp(self): 17 | pass 18 | 19 | def test_basic(self): 20 | class Book(Document): 21 | name = StringField() 22 | pages = IntField() 23 | tags = ListField(StringField()) 24 | is_published = BooleanField() 25 | 26 | Book.drop_collection() 27 | 28 | create_book = lambda: Book(name='Always be closing', pages=100, tags=['self-help', 'sales'], is_published=True) 29 | print('Doc initialization: %.3fus' % (timeit(create_book, 1000) * 10**6)) 30 | 31 | b = create_book() 32 | 33 | print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6)) 34 | 35 | print('Doc setattr: %.3fus' % (timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6)) 36 | 37 | print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6)) 38 | 39 | def save_book(): 40 | b._mark_as_changed('name') 41 | b._mark_as_changed('tags') 42 | b.save() 43 | 44 | save_book() 45 | son = b.to_mongo() 46 | 47 | print('Load from SON: %.3fus' % (timeit(lambda: Book._from_son(son), 1000) * 10**6)) 48 | 49 | print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6)) 50 | 51 | print('Load from database: %.3fus' % (timeit(lambda: Book.objects[0], 100) * 10**6)) 52 | 53 | def test_embedded(self): 54 | class Contact(EmbeddedDocument): 55 | name = StringField() 56 | title = StringField() 57 | address = StringField() 58 | 59 | class Company(Document): 60 | name = StringField() 61 | contacts = ListField(EmbeddedDocumentField(Contact)) 62 | 63 | Company.drop_collection() 64 | 65 | def get_company(): 66 | return Company( 67 | name='Elastic', 68 | contacts=[ 69 | Contact( 70 | name='Contact %d' % x, 71 | title='CEO', 72 | address='Address %d' % x, 73 | ) 74 | for x in range(1000)] 75 | ) 76 | 77 | def create_company(): 78 
| c = get_company() 79 | c.save() 80 | c.delete() 81 | 82 | print('Save/delete big object to database: %.3fms' % (timeit(create_company, 10) * 10**3)) 83 | 84 | c = get_company().save() 85 | 86 | print('Serialize big object from database: %.3fms' % (timeit(c.to_mongo, 100) * 10**3)) 87 | print('Load big object from database: %.3fms' % (timeit(lambda: Company.objects[0], 100) * 10**3)) 88 | 89 | 90 | if __name__ == '__main__': 91 | unittest.main() 92 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path[0:0] = [""] 4 | import datetime 5 | import unittest 6 | 7 | import pymongo 8 | from bson.tz_util import utc 9 | 10 | import mongoengine.connection 11 | from mongoengine import * 12 | from mongoengine.connection import ConnectionError, get_connection, get_db 13 | 14 | 15 | class ConnectionTest(unittest.TestCase): 16 | 17 | def tearDown(self): 18 | mongoengine.connection._connection_settings = {} 19 | mongoengine.connection._connections = {} 20 | mongoengine.connection._dbs = {} 21 | 22 | def test_connect(self): 23 | """Ensure that the connect() method works properly. 24 | """ 25 | connect("mongoenginetest") 26 | 27 | conn = get_connection() 28 | self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) 29 | 30 | db = get_db() 31 | self.assertTrue(isinstance(db, pymongo.database.Database)) 32 | self.assertEqual(db.name, "mongoenginetest") 33 | 34 | connect("mongoenginetest2", alias="testdb") 35 | conn = get_connection('testdb') 36 | self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) 37 | 38 | def test_register_connection(self): 39 | """Ensure that connections with different aliases may be registered. 40 | """ 41 | register_connection('testdb', 'mongoenginetest2') 42 | 43 | self.assertRaises(ConnectionError, get_connection) 44 | conn = get_connection('testdb') 45 | self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) 46 | 47 | db = get_db('testdb') 48 | self.assertTrue(isinstance(db, pymongo.database.Database)) 49 | self.assertEqual(db.name, 'mongoenginetest2') 50 | 51 | def test_connection_kwargs(self): 52 | """Ensure that connection kwargs get passed to pymongo. 
53 | """ 54 | connect('mongoenginetest', alias='t1', tz_aware=True) 55 | conn = get_connection('t1') 56 | self.assertTrue(conn.codec_options.tz_aware) 57 | 58 | connect('mongoenginetest2', alias='t2') 59 | conn = get_connection('t2') 60 | self.assertFalse(conn.codec_options.tz_aware) 61 | 62 | def test_datetime(self): 63 | connect('mongoenginetest', tz_aware=True) 64 | d = datetime.datetime(2010, 5, 5, tzinfo=utc) 65 | 66 | class DateDoc(Document): 67 | the_date = DateTimeField(required=True) 68 | 69 | DateDoc.drop_collection() 70 | DateDoc(the_date=d).save() 71 | 72 | date_doc = DateDoc.objects.first() 73 | self.assertEqual(d, date_doc.the_date) 74 | 75 | def test_connect_uri_uuidrepresentation_default_to_pythonlegacy(self): 76 | conn = connect('mongoenginetest') 77 | self.assertEqual(conn.options.codec_options.uuid_representation, 78 | pymongo.common._UUID_REPRESENTATIONS['pythonLegacy']) 79 | 80 | def test_connect_uri_uuidrepresentation_set_as_arg(self): 81 | for uuid_representation_key in ["uuidrepresentation", 82 | "uuid_representation", "uuidRepresentation"]: 83 | conn = connect('mongoenginetest', **{uuid_representation_key: "javaLegacy"}) 84 | self.assertEqual(conn.options.codec_options.uuid_representation, 85 | pymongo.common._UUID_REPRESENTATIONS['javaLegacy']) 86 | 87 | 88 | if __name__ == '__main__': 89 | unittest.main() 90 | -------------------------------------------------------------------------------- /tests/test_context_managers.py: -------------------------------------------------------------------------------- 1 | import sys 2 | sys.path[0:0] = [""] 3 | import unittest 4 | 5 | from mongoengine import * 6 | from mongoengine.connection import get_db 7 | from mongoengine.context_managers import (switch_db, switch_collection, 8 | no_sub_classes, no_dereference, 9 | query_counter) 10 | 11 | 12 | class ContextManagersTest(unittest.TestCase): 13 | 14 | def test_switch_db_context_manager(self): 15 | connect('mongoenginetest') 16 | register_connection('testdb-1', 'mongoenginetest2') 17 | 18 | class Group(Document): 19 | name = StringField() 20 | 21 | Group.drop_collection() 22 | 23 | Group(name="hello - default").save() 24 | self.assertEqual(1, Group.objects.count()) 25 | 26 | with switch_db(Group, 'testdb-1') as Group: 27 | 28 | self.assertEqual(0, Group.objects.count()) 29 | 30 | Group(name="hello").save() 31 | 32 | self.assertEqual(1, Group.objects.count()) 33 | 34 | Group.drop_collection() 35 | self.assertEqual(0, Group.objects.count()) 36 | 37 | self.assertEqual(1, Group.objects.count()) 38 | 39 | def test_switch_collection_context_manager(self): 40 | connect('mongoenginetest') 41 | register_connection('testdb-1', 'mongoenginetest2') 42 | 43 | class Group(Document): 44 | name = StringField() 45 | 46 | Group.drop_collection() 47 | with switch_collection(Group, 'group1') as Group: 48 | Group.drop_collection() 49 | 50 | Group(name="hello - group").save() 51 | self.assertEqual(1, Group.objects.count()) 52 | 53 | with switch_collection(Group, 'group1') as Group: 54 | 55 | self.assertEqual(0, Group.objects.count()) 56 | 57 | Group(name="hello - group1").save() 58 | 59 | self.assertEqual(1, Group.objects.count()) 60 | 61 | Group.drop_collection() 62 | self.assertEqual(0, Group.objects.count()) 63 | 64 | self.assertEqual(1, Group.objects.count()) 65 | 66 | def test_no_dereference_context_manager_object_id(self): 67 | """Ensure that DBRef items in ListFields aren't dereferenced. 
68 | """ 69 | connect('mongoenginetest') 70 | 71 | class User(Document): 72 | name = StringField() 73 | 74 | class Group(Document): 75 | ref = ReferenceField(User, dbref=False) 76 | generic = GenericReferenceField() 77 | members = ListField(ReferenceField(User, dbref=False)) 78 | 79 | User.drop_collection() 80 | Group.drop_collection() 81 | 82 | for i in range(1, 51): 83 | User(name='user %s' % i).save() 84 | 85 | user = User.objects.first() 86 | Group(ref=user, members=User.objects, generic=user).save() 87 | 88 | with no_dereference(Group) as NoDeRefGroup: 89 | self.assertTrue(Group._fields['members']._auto_dereference) 90 | self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) 91 | 92 | with no_dereference(Group) as Group: 93 | group = Group.objects.first() 94 | self.assertTrue(all([not isinstance(m, User) 95 | for m in group.members])) 96 | self.assertFalse(isinstance(group.ref, User)) 97 | self.assertFalse(isinstance(group.generic, User)) 98 | 99 | self.assertTrue(all([isinstance(m, User) 100 | for m in group.members])) 101 | self.assertTrue(isinstance(group.ref, User)) 102 | self.assertTrue(isinstance(group.generic, User)) 103 | 104 | def test_no_dereference_context_manager_dbref(self): 105 | """Ensure that DBRef items in ListFields aren't dereferenced. 106 | """ 107 | connect('mongoenginetest') 108 | 109 | class User(Document): 110 | name = StringField() 111 | 112 | class Group(Document): 113 | ref = ReferenceField(User, dbref=True) 114 | generic = GenericReferenceField() 115 | members = ListField(ReferenceField(User, dbref=True)) 116 | 117 | User.drop_collection() 118 | Group.drop_collection() 119 | 120 | for i in range(1, 51): 121 | User(name='user %s' % i).save() 122 | 123 | user = User.objects.first() 124 | Group(ref=user, members=User.objects, generic=user).save() 125 | 126 | with no_dereference(Group) as NoDeRefGroup: 127 | self.assertTrue(Group._fields['members']._auto_dereference) 128 | self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) 129 | 130 | with no_dereference(Group) as Group: 131 | group = Group.objects.first() 132 | self.assertTrue(all([not isinstance(m, User) 133 | for m in group.members])) 134 | self.assertFalse(isinstance(group.ref, User)) 135 | self.assertFalse(isinstance(group.generic, User)) 136 | 137 | self.assertTrue(all([isinstance(m, User) 138 | for m in group.members])) 139 | self.assertTrue(isinstance(group.ref, User)) 140 | self.assertTrue(isinstance(group.generic, User)) 141 | 142 | def test_no_sub_classes(self): 143 | class A(Document): 144 | x = IntField() 145 | y = IntField() 146 | 147 | meta = {'allow_inheritance': True} 148 | 149 | class B(A): 150 | z = IntField() 151 | 152 | class C(B): 153 | zz = IntField() 154 | 155 | A.drop_collection() 156 | 157 | A(x=10, y=20).save() 158 | A(x=15, y=30).save() 159 | B(x=20, y=40).save() 160 | B(x=30, y=50).save() 161 | C(x=40, y=60).save() 162 | 163 | self.assertEqual(A.objects.count(), 5) 164 | self.assertEqual(B.objects.count(), 3) 165 | self.assertEqual(C.objects.count(), 1) 166 | 167 | with no_sub_classes(A) as A: 168 | self.assertEqual(A.objects.count(), 2) 169 | 170 | for obj in A.objects: 171 | self.assertEqual(obj.__class__, A) 172 | 173 | with no_sub_classes(B) as B: 174 | self.assertEqual(B.objects.count(), 2) 175 | 176 | for obj in B.objects: 177 | self.assertEqual(obj.__class__, B) 178 | 179 | with no_sub_classes(C) as C: 180 | self.assertEqual(C.objects.count(), 1) 181 | 182 | for obj in C.objects: 183 | self.assertEqual(obj.__class__, C) 184 | 185 | # Confirm 
context manager exit correctly 186 | self.assertEqual(A.objects.count(), 5) 187 | self.assertEqual(B.objects.count(), 3) 188 | self.assertEqual(C.objects.count(), 1) 189 | 190 | def test_query_counter(self): 191 | connect('mongoenginetest') 192 | db = get_db() 193 | db.test.find({}) 194 | 195 | with query_counter() as q: 196 | self.assertEqual(0, q) 197 | 198 | for i in range(1, 51): 199 | db.test.estimated_document_count() 200 | 201 | self.assertEqual(50, q) 202 | 203 | if __name__ == '__main__': 204 | unittest.main() 205 | -------------------------------------------------------------------------------- /tests/test_signals.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | sys.path[0:0] = [""] 4 | import unittest 5 | 6 | from mongoengine import * 7 | from mongoengine import signals 8 | 9 | signal_output = [] 10 | 11 | 12 | class SignalTests(unittest.TestCase): 13 | """ 14 | Testing signals before/after saving and deleting. 15 | """ 16 | 17 | def get_signal_output(self, fn, *args, **kwargs): 18 | # Flush any existing signal output 19 | global signal_output 20 | signal_output = [] 21 | fn(*args, **kwargs) 22 | return signal_output 23 | 24 | def setUp(self): 25 | connect(db='mongoenginetest') 26 | 27 | class Author(Document): 28 | name = StringField() 29 | 30 | def __unicode__(self): 31 | return self.name 32 | 33 | @classmethod 34 | def pre_save(cls, sender, document, **kwargs): 35 | signal_output.append('pre_save signal, %s' % document) 36 | 37 | @classmethod 38 | def post_save(cls, sender, document, **kwargs): 39 | signal_output.append('post_save signal, %s' % document) 40 | if 'created' in kwargs: 41 | if kwargs['created']: 42 | signal_output.append('Is created') 43 | else: 44 | signal_output.append('Is updated') 45 | 46 | @classmethod 47 | def pre_delete(cls, sender, document, **kwargs): 48 | signal_output.append('pre_delete signal, %s' % document) 49 | 50 | @classmethod 51 | def post_delete(cls, sender, document, **kwargs): 52 | signal_output.append('post_delete signal, %s' % document) 53 | 54 | @classmethod 55 | def pre_bulk_insert(cls, sender, documents, **kwargs): 56 | signal_output.append('pre_bulk_insert signal, %s' % documents) 57 | 58 | @classmethod 59 | def post_bulk_insert(cls, sender, documents, **kwargs): 60 | signal_output.append('post_bulk_insert signal, %s' % documents) 61 | if kwargs.get('loaded', False): 62 | signal_output.append('Is loaded') 63 | else: 64 | signal_output.append('Not loaded') 65 | self.Author = Author 66 | Author.drop_collection() 67 | 68 | class Another(Document): 69 | 70 | name = StringField() 71 | 72 | def __unicode__(self): 73 | return self.name 74 | 75 | @classmethod 76 | def pre_delete(cls, sender, document, **kwargs): 77 | signal_output.append('pre_delete signal, %s' % document) 78 | 79 | @classmethod 80 | def post_delete(cls, sender, document, **kwargs): 81 | signal_output.append('post_delete signal, %s' % document) 82 | 83 | self.Another = Another 84 | Another.drop_collection() 85 | 86 | class ExplicitId(Document): 87 | id = IntField(primary_key=True) 88 | 89 | @classmethod 90 | def post_save(cls, sender, document, **kwargs): 91 | if 'created' in kwargs: 92 | if kwargs['created']: 93 | signal_output.append('Is created') 94 | else: 95 | signal_output.append('Is updated') 96 | 97 | self.ExplicitId = ExplicitId 98 | ExplicitId.drop_collection() 99 | 100 | # Save up the number of connected signals so that we can check at the 101 | # end that all the signals we register 
get properly unregistered 102 | self.pre_signals = ( 103 | len(signals.pre_save.receivers), 104 | len(signals.post_save.receivers), 105 | len(signals.pre_delete.receivers), 106 | len(signals.post_delete.receivers), 107 | len(signals.pre_bulk_insert.receivers), 108 | len(signals.post_bulk_insert.receivers), 109 | ) 110 | 111 | signals.pre_save.connect(Author.pre_save, sender=Author) 112 | signals.post_save.connect(Author.post_save, sender=Author) 113 | signals.pre_delete.connect(Author.pre_delete, sender=Author) 114 | signals.post_delete.connect(Author.post_delete, sender=Author) 115 | signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author) 116 | signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author) 117 | 118 | signals.pre_delete.connect(Another.pre_delete, sender=Another) 119 | signals.post_delete.connect(Another.post_delete, sender=Another) 120 | 121 | signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId) 122 | 123 | def tearDown(self): 124 | signals.post_delete.disconnect(self.Author.post_delete) 125 | signals.pre_delete.disconnect(self.Author.pre_delete) 126 | signals.post_save.disconnect(self.Author.post_save) 127 | signals.pre_save.disconnect(self.Author.pre_save) 128 | signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) 129 | signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) 130 | 131 | signals.post_delete.disconnect(self.Another.post_delete) 132 | signals.pre_delete.disconnect(self.Another.pre_delete) 133 | 134 | signals.post_save.disconnect(self.ExplicitId.post_save) 135 | 136 | # Check that all our signals got disconnected properly. 137 | post_signals = ( 138 | len(signals.pre_save.receivers), 139 | len(signals.post_save.receivers), 140 | len(signals.pre_delete.receivers), 141 | len(signals.post_delete.receivers), 142 | len(signals.pre_bulk_insert.receivers), 143 | len(signals.post_bulk_insert.receivers), 144 | ) 145 | 146 | self.ExplicitId.objects.delete() 147 | 148 | self.assertEqual(self.pre_signals, post_signals) 149 | 150 | def test_model_signals(self): 151 | """ Model saves should throw some signals. 
""" 152 | 153 | def bulk_create_author_with_load(): 154 | a1 = self.Author(name='Bill Shakespeare') 155 | self.Author.objects.insert([a1], load_bulk=True) 156 | 157 | def bulk_create_author_without_load(): 158 | a1 = self.Author(name='Bill Shakespeare') 159 | self.Author.objects.insert([a1], load_bulk=False) 160 | 161 | a1 = self.Author(name='Bill Shakespeare') 162 | self.assertEqual(self.get_signal_output(a1.save), [ 163 | "pre_save signal, Bill Shakespeare", 164 | "post_save signal, Bill Shakespeare", 165 | "Is created" 166 | ]) 167 | 168 | a1.reload() 169 | a1.name = 'William Shakespeare' 170 | self.assertEqual(self.get_signal_output(a1.save), [ 171 | "pre_save signal, William Shakespeare", 172 | "post_save signal, William Shakespeare", 173 | "Is updated" 174 | ]) 175 | 176 | self.assertEqual(self.get_signal_output(a1.delete), [ 177 | 'pre_delete signal, William Shakespeare', 178 | 'post_delete signal, William Shakespeare', 179 | ]) 180 | 181 | signal_output = self.get_signal_output(bulk_create_author_with_load) 182 | 183 | self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [ 184 | "pre_bulk_insert signal, []", 185 | "post_bulk_insert signal, []", 186 | "Is loaded", 187 | ]) 188 | 189 | self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ 190 | "pre_bulk_insert signal, []", 191 | "post_bulk_insert signal, []", 192 | "Not loaded", 193 | ]) 194 | 195 | def test_queryset_delete_signals(self): 196 | """ Queryset delete should throw some signals. """ 197 | 198 | self.Another(name='Bill Shakespeare').save() 199 | self.assertEqual(self.get_signal_output(self.Another.objects.delete), [ 200 | 'pre_delete signal, Bill Shakespeare', 201 | 'post_delete signal, Bill Shakespeare', 202 | ]) 203 | 204 | def test_signals_with_explicit_doc_ids(self): 205 | """ Model saves must have a created flag the first time.""" 206 | ei = self.ExplicitId(id=123) 207 | # post save must receive the created flag, even if there's already 208 | # an object id present 209 | self.assertEqual(self.get_signal_output(ei.save), ['Is created']) 210 | # second time, it must be an update 211 | self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) 212 | 213 | if __name__ == '__main__': 214 | unittest.main() 215 | --------------------------------------------------------------------------------
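
The geo field tests earlier in this listing (tests/fields/test_geo.py) double as a reference for the exact validation messages PointField and the other GeoJSON fields raise. As a rough, hedged sketch of how that validation surfaces outside the test helper, the snippet below defines an illustrative `Place` document (not part of the suite) and triggers the same message asserted in test_point_validation; no database connection is needed for validate().

from mongoengine import Document, PointField, ValidationError


class Place(Document):              # illustrative name, not taken from the test suite
    loc = PointField()


Place(loc=[1, 2]).validate()        # a plain (x, y) pair validates cleanly

try:
    Place(loc=[1, 2, 3]).validate()  # three coordinates are rejected
except ValidationError as exc:
    # The wrapped message mirrors the expectation in test_point_validation:
    # "Value ([1, 2, 3]) must be a two-dimensional point"
    print(exc)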
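
tests/test_context_managers.py likewise documents the intended usage of switch_db and the other context managers. The sketch below condenses that pattern into module-level code; it reuses the Group document shape and the 'testdb-1' alias registered in the tests, and it assumes a local MongoDB is reachable, just as the suite does.

from mongoengine import Document, StringField, connect, register_connection
from mongoengine.context_managers import switch_db

connect('mongoenginetest')                           # default alias, as in the tests
register_connection('testdb-1', 'mongoenginetest2')  # secondary alias used by the tests


class Group(Document):
    name = StringField()


Group(name='default alias').save()

# Inside the block the (temporarily rebound) Group class reads and writes 'testdb-1'.
with switch_db(Group, 'testdb-1') as Group:
    Group(name='testdb-1 alias').save()

# On exit the original alias is restored, so this query hits 'mongoenginetest' again.
print(Group.objects(name='default alias').count())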
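
Finally, the signal tests directly above exercise the full connect/disconnect lifecycle inside setUp() and tearDown(). A stripped-down, illustrative version of the same wiring might look like this; Book and audit_post_save are made-up names, and a running local MongoDB is assumed because save() touches the database.

from mongoengine import Document, StringField, connect, signals

connect(db='mongoenginetest')       # same test database name the suite uses


class Book(Document):
    title = StringField()


def audit_post_save(sender, document, **kwargs):
    # post_save passes a `created` flag, which the tests above also inspect
    state = 'created' if kwargs.get('created') else 'updated'
    print('%s: %s' % (state, document.title))


# Limit the handler to Book documents, mirroring the sender= usage in SignalTests.
signals.post_save.connect(audit_post_save, sender=Book)

Book(title='Signals by example').save()        # prints "created: Signals by example"

signals.post_save.disconnect(audit_post_save)  # clean up, as tearDown() does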