├── .gitignore ├── .travis.yml ├── AUTHORS.rst ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── appengine_fixture_loader ├── __init__.py └── loader.py ├── docs ├── Makefile ├── authors.rst ├── conf.py ├── contributing.rst ├── history.rst ├── index.rst ├── installation.rst ├── make.bat ├── readme.rst └── usage.rst ├── resources ├── Makefile ├── autogenerated ├── get_current_sdk_version.sh └── requirements.txt ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── ancestor_tests.py ├── ancestors_and_dogs.json ├── hard_coded_id.json ├── hard_coded_id_test.py ├── multi_kind_tests.py ├── multi_level_tests.py ├── persons.json ├── persons_and_dogs.json ├── persons_children_and_dogs.json └── single_level_tests.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | 56 | # Mr Developer 57 | .mr.developer.cfg 58 | .project 59 | .pydevproject 60 | 61 | # Emacs 62 | *# 63 | .#* 64 | *~ 65 | 66 | # Egg and SDK download caches 67 | cache 68 | 69 | # The virtual environments 70 | .env 71 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | install: 5 | pip install coveralls 6 | # command to run tests 7 | script: "make travis" 8 | after_success: 9 | coveralls -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Ricardo Bánffy 9 | 10 | Contributors 11 | ------------ 12 | 13 | John Del Rosario (john2x) 14 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | Report bugs at https://github.com/rbanffy/appengine-fixture-loader/issues. 
17 | 18 | If you are reporting a bug, please include: 19 | 20 | * Your operating system name and version. 21 | * Any details about your local setup that might be helpful in troubleshooting. 22 | * Detailed steps to reproduce the bug. 23 | 24 | Fix Bugs 25 | ~~~~~~~~ 26 | 27 | Look through the GitHub issues for bugs. Anything tagged with "bug" 28 | is open to whoever wants to implement it. 29 | 30 | Implement Features 31 | ~~~~~~~~~~~~~~~~~~ 32 | 33 | Look through the GitHub issues for features. Anything tagged with "feature" 34 | is open to whoever wants to implement it. 35 | 36 | Write Documentation 37 | ~~~~~~~~~~~~~~~~~~~ 38 | 39 | App Engine Fixture Loader could always use more documentation, whether as part of the 40 | official App Engine Fixture Loader docs, in docstrings, or even on the web in blog posts, 41 | articles, and such. 42 | 43 | Submit Feedback 44 | ~~~~~~~~~~~~~~~ 45 | 46 | The best way to send feedback is to file an issue at https://github.com/rbanffy/appengine-fixture-loader/issues. 47 | 48 | If you are proposing a feature: 49 | 50 | * Explain in detail how it would work. 51 | * Keep the scope as narrow as possible, to make it easier to implement. 52 | * Remember that this is a volunteer-driven project, and that contributions 53 | are welcome :) 54 | 55 | Get Started! 56 | ------------ 57 | 58 | Ready to contribute? Here's how to set up `appengine-fixture-loader` for local development. 59 | 60 | 1. Fork the `appengine-fixture-loader` repo on GitHub. 61 | 2. Clone your fork locally:: 62 | 63 | $ git clone git@github.com:your_name_here/appengine-fixture-loader.git 64 | 65 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 66 | 67 | $ mkvirtualenv appengine-fixture-loader 68 | $ cd appengine-fixture-loader/ 69 | $ python setup.py develop 70 | 71 | 4. 
Create a branch for local development:: 72 | 73 | $ git checkout -b name-of-your-bugfix-or-feature 74 | 75 | Now you can make your changes locally. 76 | 77 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: 78 | 79 | $ flake8 appengine-fixture-loader tests 80 | $ python setup.py test 81 | $ tox 82 | 83 | To get flake8 and tox, just pip install them into your virtualenv. 84 | 85 | 6. Commit your changes and push your branch to GitHub:: 86 | 87 | $ git add . 88 | $ git commit -m "Your detailed description of your changes." 89 | $ git push origin name-of-your-bugfix-or-feature 90 | 91 | 7. Submit a pull request through the GitHub website. 92 | 93 | Pull Request Guidelines 94 | ----------------------- 95 | 96 | Before you submit a pull request, check that it meets these guidelines: 97 | 98 | 1. The pull request should include tests. 99 | 2. If the pull request adds functionality, the docs should be updated. Put 100 | your new functionality into a function with a docstring, and add the 101 | feature to the list in README.rst. 102 | 3. The pull request should work for Python 2.6, 2.7, 3.3, and 3.4, and for PyPy. Check 103 | https://travis-ci.org/rbanffy/appengine-fixture-loader/pull_requests 104 | and make sure that the tests pass for all supported Python versions. 105 | 106 | Tips 107 | ---- 108 | 109 | To run a subset of tests:: 110 | 111 | $ python -m unittest tests.test_appengine-fixture-loader 112 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | .. :changelog: 2 | 3 | History 4 | ------- 5 | 6 | 0.1.0 (2014-10-13) 7 | ------------------ 8 | 9 | * First release on GitHub. 
10 | 11 | 0.1.1 (2014-12-4) 12 | ----------------- 13 | 14 | * Add support for multi-kind JSON files 15 | 16 | 0.1.2 (2014-12-4) 17 | ----------------- 18 | 19 | * Minor fixes 20 | 21 | 0.1.3 (2014-12-5) 22 | ----------------- 23 | 24 | * Added support for PropertyKey-based child entities 25 | 26 | 0.1.4 (2015-2-4) 27 | ----------------- 28 | 29 | * Fixed bug in which post-processor was called on every property change 30 | * Added section on development to README.rst 31 | 32 | 0.1.5 (2015-2-11) 33 | ----------------- 34 | 35 | * Added `__children__` support 36 | * Added manual key definition through the `__id__` attribute 37 | 38 | 0.1.6 (2015-8-30) 39 | ----------------- 40 | 41 | * Builds if you don't have `curl` installed 42 | * Minor documentation improvements 43 | 44 | 0.1.7 (2015-11-3) 45 | ----------------- 46 | 47 | * Syntax highlighting on the documentation 48 | * Coverage analysis using Coveralls 49 | 50 | 0.1.8 (2016-02-05) 51 | ------------------ 52 | 53 | * New resources/Makefile 54 | 55 | 0.1.9 (2016-12-19) 56 | ------------------ 57 | 58 | * Replace pep8 with pycodestyle 59 | * Update current SDK version detection to latest version 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2014 Ricardo L. A. Banffy 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | include resources/Makefile 2 | 3 | # Override pycodestyle target 4 | pycodestyle: 5 | @find $(CURDIR)/appengine_fixture_loader/ -name '*.py' -exec pycodestyle {} \; 6 | @find $(CURDIR)/tests/ -name '*.py' -exec pycodestyle {} \; 7 | 8 | # The same for pyflakes 9 | pyflakes: 10 | @find $(CURDIR)/appengine_fixture_loader/ -name '*.py' -exec pyflakes {} \; 11 | @find $(CURDIR)/tests/ -name '*.py' -exec pyflakes {} \; 12 | 13 | package: 14 | @.env/bin/python2.7 setup.py sdist 15 | @.env/bin/python2.7 
setup.py bdist 16 | 17 | upload: clean 18 | @.env/bin/python2.7 setup.py sdist upload 19 | @.env/bin/python2.7 setup.py bdist upload 20 | 21 | # Overriding TravisCI 22 | travis: venv package 23 | @.env/bin/coverage run --source=appengine_fixture_loader setup.py test 24 | 25 | clean: 26 | @rm -f dist/* 27 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | appengine-fixture-loader 2 | ======================== 3 | 4 | A simple way to load Django-like fixtures into the local development datastore, originally intended to be used by `testable_appengine `_. 5 | 6 | .. image:: https://img.shields.io/pypi/l/appengine-fixture-loader.svg 7 | :target: ./LICENSE 8 | 9 | .. image:: https://badge.fury.io/py/Appengine-Fixture-Loader.svg 10 | :target: https://pypi.python.org/pypi/appengine-fixture-loader/ 11 | 12 | .. image:: https://api.travis-ci.org/rbanffy/appengine-fixture-loader.svg 13 | :target: https://travis-ci.org/rbanffy/appengine-fixture-loader 14 | 15 | .. image:: https://img.shields.io/pypi/pyversions/appengine-fixture-loader.svg 16 | :target: https://pypi.python.org/pypi/appengine-fixture-loader/ 17 | 18 | .. image:: https://img.shields.io/pypi/dm/appengine-fixture-loader.svg 19 | :target: https://pypi.python.org/pypi/appengine-fixture-loader/ 20 | 21 | .. image:: https://coveralls.io/repos/rbanffy/appengine-fixture-loader/badge.svg?branch=master&service=github 22 | :target: https://coveralls.io/github/rbanffy/appengine-fixture-loader?branch=master 23 | 24 | Installing 25 | ---------- 26 | 27 | For the less adventurous, Appengine-Fixture-Loader is available on PyPI at https://pypi.python.org/pypi/Appengine-Fixture-Loader. 28 | 29 | Single-kind loads 30 | ------------------ 31 | 32 | Let's say you have a model like this: 33 | 34 | .. 
code-block:: python 35 | 36 | class Person(ndb.Model): 37 | """Our sample class""" 38 | first_name = ndb.StringProperty() 39 | last_name = ndb.StringProperty() 40 | born = ndb.DateTimeProperty() 41 | userid = ndb.IntegerProperty() 42 | thermostat_set_to = ndb.FloatProperty() 43 | snores = ndb.BooleanProperty() 44 | started_school = ndb.DateProperty() 45 | sleeptime = ndb.TimeProperty() 46 | favorite_movies = ndb.JsonProperty() 47 | processed = ndb.BooleanProperty(default=False) 48 | 49 | If you want to load a data file like this: 50 | 51 | .. code-block:: javascript 52 | 53 | [ 54 | { 55 | "__id__": "jdoe", 56 | "born": "1968-03-03T00:00:00", 57 | "first_name": "John", 58 | "last_name": "Doe", 59 | "favorite_movies": [ 60 | "2001", 61 | "The Day The Earth Stood Still (1951)" 62 | ], 63 | "snores": false, 64 | "sleeptime": "23:00", 65 | "started_school": "1974-02-15", 66 | "thermostat_set_to": 18.34, 67 | "userid": 1 68 | }, 69 | 70 | ... 71 | 72 | { 73 | "born": "1980-05-25T00:00:00", 74 | "first_name": "Bob", 75 | "last_name": "Schneier", 76 | "favorite_movies": [ 77 | "2001", 78 | "Superman" 79 | ], 80 | "snores": true, 81 | "sleeptime": "22:00", 82 | "started_school": "1985-08-01", 83 | "thermostat_set_to": 18.34, 84 | "userid": -5 85 | } 86 | ] 87 | 88 | All you need to do is to: 89 | 90 | .. code-block:: python 91 | 92 | from appengine_fixture_loader.loader import load_fixture 93 | 94 | and then: 95 | 96 | .. code-block:: python 97 | 98 | loaded_data = load_fixture('tests/persons.json', kind=Person) 99 | 100 | In our example, `loaded_data` will contain a list of already persisted Person models you can then manipulate and persist again. 101 | 102 | The `__id__` attribute, when defined, will save the object with that given id. In our case, the key to the first object defined will be a `ndb.Key('Person', 'jdoe')`. 
The key may be defined on an object by object base - where the `__id__` parameter is omitted, an automatic id will be generated - the key to the second one will be something like `ndb.Key('Person', 1)`. 103 | 104 | Multi-kind loads 105 | ---------------- 106 | 107 | It's convenient to be able to load multiple kinds of objects from a single file. For those cases, we provide a simple way to identify the kind of object being loaded and to provide a set of models to use when loading the objects. 108 | 109 | Consider our original example model: 110 | 111 | .. code-block:: python 112 | 113 | class Person(ndb.Model): 114 | """Our sample class""" 115 | first_name = ndb.StringProperty() 116 | last_name = ndb.StringProperty() 117 | born = ndb.DateTimeProperty() 118 | userid = ndb.IntegerProperty() 119 | thermostat_set_to = ndb.FloatProperty() 120 | snores = ndb.BooleanProperty() 121 | started_school = ndb.DateProperty() 122 | sleeptime = ndb.TimeProperty() 123 | favorite_movies = ndb.JsonProperty() 124 | processed = ndb.BooleanProperty(default=False) 125 | 126 | and let's add a second one: 127 | 128 | .. code-block:: python 129 | 130 | class Dog(ndb.Model): 131 | """Another sample class""" 132 | name = ndb.StringProperty() 133 | 134 | Now, if we wanted to make a single file load objects of the two kinds, we'd need to use the `__kind__` attribute in the JSON: 135 | 136 | .. code-block:: javascript 137 | 138 | [ 139 | { 140 | "__kind__": "Person", 141 | "born": "1968-03-03T00:00:00", 142 | "first_name": "John", 143 | "last_name": "Doe", 144 | "favorite_movies": [ 145 | "2001", 146 | "The Day The Earth Stood Still (1951)" 147 | ], 148 | "snores": false, 149 | "sleeptime": "23:00", 150 | "started_school": "1974-02-15", 151 | "thermostat_set_to": 18.34, 152 | "userid": 1 153 | }, 154 | { 155 | "__kind__": "Dog", 156 | "name": "Fido" 157 | } 158 | ] 159 | 160 | And, to load the file, we'd have to: 161 | 162 | .. 
code-block:: python 163 | 164 | from appengine_fixture_loader.loader import load_fixture 165 | 166 | and: 167 | 168 | .. code-block:: python 169 | 170 | loaded_data = load_fixture('tests/persons_and_dogs.json', 171 | kind={'Person': Person, 'Dog': Dog}) 172 | 173 | will result in a list of Persons and Dogs (in this case, one person and one dog). 174 | 175 | Multi-kind, multi-level loads 176 | ----------------------------- 177 | 178 | Anther common case is having hierarchies of entities that you want to reconstruct for your tests. 179 | 180 | Using slightly modified versions of our example classes: 181 | 182 | .. code-block:: python 183 | 184 | class Person(ndb.Model): 185 | """Our sample class""" 186 | first_name = ndb.StringProperty() 187 | last_name = ndb.StringProperty() 188 | born = ndb.DateTimeProperty() 189 | userid = ndb.IntegerProperty() 190 | thermostat_set_to = ndb.FloatProperty() 191 | snores = ndb.BooleanProperty() 192 | started_school = ndb.DateProperty() 193 | sleeptime = ndb.TimeProperty() 194 | favorite_movies = ndb.JsonProperty() 195 | processed = ndb.BooleanProperty(default=False) 196 | appropriate_adult = ndb.KeyProperty() 197 | 198 | and: 199 | 200 | .. code-block:: python 201 | 202 | class Dog(ndb.Model): 203 | """Another sample class""" 204 | name = ndb.StringProperty() 205 | processed = ndb.BooleanProperty(default=False) 206 | owner = ndb.KeyProperty() 207 | 208 | And using `__children__[attribute_name]__` like meta-attributes, as in: 209 | 210 | .. code-block:: javascript 211 | 212 | [ 213 | { 214 | "__kind__": "Person", 215 | "born": "1968-03-03T00:00:00", 216 | "first_name": "John", 217 | "last_name": "Doe", 218 | 219 | ... 220 | 221 | "__children__appropriate_adult__": [ 222 | { 223 | "__kind__": "Person", 224 | "born": "1970-04-27T00:00:00", 225 | 226 | ... 227 | 228 | "__children__appropriate_adult__": [ 229 | { 230 | "__kind__": "Person", 231 | "born": "1980-05-25T00:00:00", 232 | "first_name": "Bob", 233 | 234 | ... 
235 | 236 | "userid": 3 237 | } 238 | ] 239 | } 240 | ] 241 | }, 242 | { 243 | "__kind__": "Person", 244 | "born": "1999-09-19T00:00:00", 245 | "first_name": "Alice", 246 | 247 | ... 248 | 249 | "__children__appropriate_adult__": [ 250 | { 251 | "__kind__": "Person", 252 | 253 | ... 254 | 255 | "__children__owner__": [ 256 | { 257 | "__kind__": "Dog", 258 | "name": "Fido" 259 | } 260 | ] 261 | } 262 | ] 263 | } 264 | ] 265 | 266 | you can reconstruct entire entity trees for your tests. 267 | 268 | Parent/Ancestor-based relationships with automatic keys 269 | ------------------------------------------------------- 270 | 271 | It's also possible to set the `parent` by using the `__children__` attribute. 272 | 273 | For our example classes, importing: 274 | 275 | .. code-block:: javascript 276 | 277 | [ 278 | { 279 | "__kind__": "Person", 280 | "first_name": "Alice", 281 | 282 | ... 283 | 284 | "__children__": [ 285 | { 286 | "__kind__": "Person", 287 | "first_name": "Bob", 288 | ... 289 | 290 | "__children__owner__": [ 291 | { 292 | "__kind__": "Dog", 293 | "name": "Fido" 294 | } 295 | ] 296 | } 297 | ] 298 | } 299 | ] 300 | 301 | should be equivalent to: 302 | 303 | .. code-block:: python 304 | 305 | alice = Person(first_name='Alice') 306 | alice.put() 307 | bob = Person(first_name='Bob', parent=alice) 308 | bob.put() 309 | fido = Dog(name='Fido', parent=bob) 310 | fido.put() 311 | 312 | You can then retrieve fido with: 313 | 314 | .. code-block:: python 315 | 316 | fido = Dog.query(ancestor=alice.key).get() 317 | 318 | 319 | Development 320 | =========== 321 | 322 | There are two recommended ways to work on this codebase. If you want to keep 323 | one and only one App Engine SDK install, you may clone the repository and run 324 | the tests by:: 325 | 326 | $ PYTHONPATH=path/to/appengine/library python setup.py test 327 | 328 | Alternatively, this project contains code and support files derived from the 329 | testable_appengine project. 
Testable_appengine was conceived to make it easier 330 | to write (and run) tests for Google App Engine applications and to hook your 331 | application to Travis CI. In essence, it creates a virtualenv and downloads the 332 | most up-to-date SDK and other support tools into it. To use it, you run 333 | `make`. Calling `make help` will give you a quick list of available make 334 | targets:: 335 | 336 | $ make venv 337 | Running virtualenv with interpreter /usr/bin/python2 338 | New python executable in /export/home/ricardo/projects/appengine-fixture-loader/.env/bin/python2 339 | Also creating executable in /export/home/ricardo/projects/appengine-fixture-loader/.env/bin/python 340 | (...) 341 | ‘/export/home/ricardo/projects/appengine-fixture-loader/.env/bin/run_tests.py’ -> ‘/export/home/ricardo/projects/appengine-fixture-loader/.env/lib/google_appengine/run_tests.py’ 342 | ‘/export/home/ricardo/projects/appengine-fixture-loader/.env/bin/wrapper_util.py’ -> ‘/export/home/ricardo/projects/appengine-fixture-loader/.env/lib/google_appengine/wrapper_util.py’ 343 | $ source .env/bin/activate 344 | (.env) $ nosetests 345 | .............. 
"""
Tools to automate loading of test fixtures
"""

import json
from datetime import datetime, time, date

from google.appengine.ext.ndb.model import (DateTimeProperty, DateProperty,
                                            TimeProperty)


def _sensible_value(attribute_type, value):
    """Coerce a raw JSON value into the Python type the ndb property expects.

    Date/time-like properties arrive as strings in the fixture file and are
    parsed here; every other property type receives the JSON value unchanged.
    """
    if type(attribute_type) is DateTimeProperty:
        retval = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S')
    elif type(attribute_type) is TimeProperty:
        try:
            dt = datetime.strptime(value, '%H:%M:%S')
        except ValueError:
            # Fixtures may omit the seconds field (e.g. "23:00").
            dt = datetime.strptime(value, '%H:%M')
        retval = time(dt.hour, dt.minute, dt.second)
    elif type(attribute_type) is DateProperty:
        dt = datetime.strptime(value, '%Y-%m-%d')
        retval = date(dt.year, dt.month, dt.day)
    else:
        retval = value

    return retval


def load_fixture(filename, kind, post_processor=None):
    """
    Load a JSON fixture file into entities of a given class, running the
    post_processor on each instance before it's saved.

    filename -- path to the JSON fixture file
    kind -- a single ndb model class, or a mapping of kind name to model
            class (paired with the '__kind__' attribute in the JSON)
    post_processor -- optional callable invoked with each instance before
                      it is persisted

    Returns a list of all persisted entities (parents and children).
    """

    def _load(od, kind, post_processor, parent=None, presets=None):
        """
        Loads a single dictionary (od) into an object, overlays the values in
        presets, persists it and calls itself on the objects in
        __children__* keys.
        """
        # BUGFIX: the original used a mutable default argument
        # (presets={}), which is shared across calls; build a fresh dict
        # per invocation instead.
        if presets is None:
            presets = {}

        if hasattr(kind, 'keys'):  # kind is a map of kind name -> model
            objtype = kind[od['__kind__']]
        else:
            objtype = kind

        # An explicit '__id__' fixes the entity's key id; otherwise the
        # datastore assigns one automatically.
        obj_id = od.get('__id__')
        if obj_id is not None:
            obj = objtype(id=obj_id, parent=parent)
        else:
            obj = objtype(parent=parent)

        # Iterate over the non-special attributes and overlay the presets
        for attribute_name in [k for k in od.keys()
                               if not k.startswith('__') and
                               not k.endswith('__')] + presets.keys():
            attribute_type = objtype.__dict__[attribute_name]
            attribute_value = _sensible_value(attribute_type,
                                              presets.get(
                                                  attribute_name,
                                                  od.get(attribute_name)))
            # Write straight into ndb's value store so validators and
            # defaults are bypassed, matching the original loader behavior.
            obj.__dict__['_values'][attribute_name] = attribute_value

        if post_processor:
            post_processor(obj)

        # Saving obj is required to continue with the children
        obj.put()

        loaded = [obj]

        # Process ancestor-based __children__ (children get obj as parent)
        for item in od.get('__children__', []):
            loaded.extend(_load(item, kind, post_processor, parent=obj.key))

        # Process other __children__[key]__ items
        for child_attribute_name in [k for k in od.keys()
                                     if k.startswith('__children__')
                                     and k != '__children__']:
            # '__children__owner__'.split('__') yields
            # ['', 'children', 'owner', ''], so [-2] is the attribute name.
            attribute_name = child_attribute_name.split('__')[-2]

            for child in od[child_attribute_name]:
                loaded.extend(_load(child, kind, post_processor,
                                    presets={attribute_name: obj.key}))

        return loaded

    # BUGFIX: the original leaked the file handle via json.load(open(...));
    # a context manager guarantees it is closed promptly.
    with open(filename) as fixture_file:
        tree = json.load(fixture_file)

    loaded = []

    # Start with the top-level of the tree
    for item in tree:
        loaded.extend(_load(item, kind, post_processor))

    return loaded
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo 
files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/appengine_fixture_loader.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/appengine_fixture_loader.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/appengine_fixture_loader" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/appengine_fixture_loader" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 
129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
178 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # appengine_fixture_loader documentation build configuration file, created by 5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another 20 | # directory, add these directories to sys.path here. If the directory is 21 | # relative to the documentation root, use os.path.abspath to make it 22 | # absolute, like shown here. 23 | #sys.path.insert(0, os.path.abspath('.')) 24 | 25 | # Get the project root dir, which is the parent dir of this 26 | cwd = os.getcwd() 27 | project_root = os.path.dirname(cwd) 28 | 29 | # Insert the project root dir as the first element in the PYTHONPATH. 30 | # This lets us ensure that the source package is imported, and that its 31 | # version is used. 32 | sys.path.insert(0, project_root) 33 | 34 | import appengine_fixture_loader 35 | 36 | # -- General configuration --------------------------------------------- 37 | 38 | # If your documentation needs a minimal Sphinx version, state it here. 39 | #needs_sphinx = '1.0' 40 | 41 | # Add any Sphinx extension module names here, as strings. 
They can be 42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 43 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] 44 | 45 | # Add any paths that contain templates here, relative to this directory. 46 | templates_path = ['_templates'] 47 | 48 | # The suffix of source filenames. 49 | source_suffix = '.rst' 50 | 51 | # The encoding of source files. 52 | #source_encoding = 'utf-8-sig' 53 | 54 | # The master toctree document. 55 | master_doc = 'index' 56 | 57 | # General information about the project. 58 | project = u'Python Boilerplate' 59 | copyright = u'2014, Ricardo Bánffy' 60 | 61 | # The version info for the project you're documenting, acts as replacement 62 | # for |version| and |release|, also used in various other places throughout 63 | # the built documents. 64 | # 65 | # The short X.Y version. 66 | version = appengine_fixture_loader.__version__ 67 | # The full version, including alpha/beta/rc tags. 68 | release = appengine_fixture_loader.__version__ 69 | 70 | # The language for content autogenerated by Sphinx. Refer to documentation 71 | # for a list of supported languages. 72 | #language = None 73 | 74 | # There are two options for replacing |today|: either, you set today to 75 | # some non-false value, then it is used: 76 | #today = '' 77 | # Else, today_fmt is used as the format for a strftime call. 78 | #today_fmt = '%B %d, %Y' 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | exclude_patterns = ['_build'] 83 | 84 | # The reST default role (used for this markup: `text`) to use for all 85 | # documents. 86 | #default_role = None 87 | 88 | # If true, '()' will be appended to :func: etc. cross-reference text. 89 | #add_function_parentheses = True 90 | 91 | # If true, the current module name will be prepended to all description 92 | # unit titles (such as .. function::). 
93 | #add_module_names = True 94 | 95 | # If true, sectionauthor and moduleauthor directives will be shown in the 96 | # output. They are ignored by default. 97 | #show_authors = False 98 | 99 | # The name of the Pygments (syntax highlighting) style to use. 100 | pygments_style = 'sphinx' 101 | 102 | # A list of ignored prefixes for module index sorting. 103 | #modindex_common_prefix = [] 104 | 105 | # If true, keep warnings as "system message" paragraphs in the built 106 | # documents. 107 | #keep_warnings = False 108 | 109 | 110 | # -- Options for HTML output ------------------------------------------- 111 | 112 | # The theme to use for HTML and HTML Help pages. See the documentation for 113 | # a list of builtin themes. 114 | html_theme = 'default' 115 | 116 | # Theme options are theme-specific and customize the look and feel of a 117 | # theme further. For a list of options available for each theme, see the 118 | # documentation. 119 | #html_theme_options = {} 120 | 121 | # Add any paths that contain custom themes here, relative to this directory. 122 | #html_theme_path = [] 123 | 124 | # The name for this set of Sphinx documents. If None, it defaults to 125 | # " v documentation". 126 | #html_title = None 127 | 128 | # A shorter title for the navigation bar. Default is the same as 129 | # html_title. 130 | #html_short_title = None 131 | 132 | # The name of an image file (relative to this directory) to place at the 133 | # top of the sidebar. 134 | #html_logo = None 135 | 136 | # The name of an image file (within the static path) to use as favicon 137 | # of the docs. This file should be a Windows icon file (.ico) being 138 | # 16x16 or 32x32 pixels large. 139 | #html_favicon = None 140 | 141 | # Add any paths that contain custom static files (such as style sheets) 142 | # here, relative to this directory. They are copied after the builtin 143 | # static files, so a file named "default.css" will overwrite the builtin 144 | # "default.css". 
145 | html_static_path = ['_static'] 146 | 147 | # If not '', a 'Last updated on:' timestamp is inserted at every page 148 | # bottom, using the given strftime format. 149 | #html_last_updated_fmt = '%b %d, %Y' 150 | 151 | # If true, SmartyPants will be used to convert quotes and dashes to 152 | # typographically correct entities. 153 | #html_use_smartypants = True 154 | 155 | # Custom sidebar templates, maps document names to template names. 156 | #html_sidebars = {} 157 | 158 | # Additional templates that should be rendered to pages, maps page names 159 | # to template names. 160 | #html_additional_pages = {} 161 | 162 | # If false, no module index is generated. 163 | #html_domain_indices = True 164 | 165 | # If false, no index is generated. 166 | #html_use_index = True 167 | 168 | # If true, the index is split into individual pages for each letter. 169 | #html_split_index = False 170 | 171 | # If true, links to the reST sources are added to the pages. 172 | #html_show_sourcelink = True 173 | 174 | # If true, "Created using Sphinx" is shown in the HTML footer. 175 | # Default is True. 176 | #html_show_sphinx = True 177 | 178 | # If true, "(C) Copyright ..." is shown in the HTML footer. 179 | # Default is True. 180 | #html_show_copyright = True 181 | 182 | # If true, an OpenSearch description file will be output, and all pages 183 | # will contain a tag referring to it. The value of this option 184 | # must be the base URL from which the finished HTML is served. 185 | #html_use_opensearch = '' 186 | 187 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 188 | #html_file_suffix = None 189 | 190 | # Output file base name for HTML help builder. 191 | htmlhelp_basename = 'appengine_fixture_loaderdoc' 192 | 193 | 194 | # -- Options for LaTeX output ------------------------------------------ 195 | 196 | latex_elements = { 197 | # The paper size ('letterpaper' or 'a4paper'). 
198 | #'papersize': 'letterpaper', 199 | 200 | # The font size ('10pt', '11pt' or '12pt'). 201 | #'pointsize': '10pt', 202 | 203 | # Additional stuff for the LaTeX preamble. 204 | #'preamble': '', 205 | } 206 | 207 | # Grouping the document tree into LaTeX files. List of tuples 208 | # (source start file, target name, title, author, documentclass 209 | # [howto/manual]). 210 | latex_documents = [ 211 | ('index', 'appengine_fixture_loader.tex', 212 | u'Python Boilerplate Documentation', 213 | u'Ricardo Bánffy', 'manual'), 214 | ] 215 | 216 | # The name of an image file (relative to this directory) to place at 217 | # the top of the title page. 218 | #latex_logo = None 219 | 220 | # For "manual" documents, if this is true, then toplevel headings 221 | # are parts, not chapters. 222 | #latex_use_parts = False 223 | 224 | # If true, show page references after internal links. 225 | #latex_show_pagerefs = False 226 | 227 | # If true, show URL addresses after external links. 228 | #latex_show_urls = False 229 | 230 | # Documents to append as an appendix to all manuals. 231 | #latex_appendices = [] 232 | 233 | # If false, no module index is generated. 234 | #latex_domain_indices = True 235 | 236 | 237 | # -- Options for manual page output ------------------------------------ 238 | 239 | # One entry per manual page. List of tuples 240 | # (source start file, name, description, authors, manual section). 241 | man_pages = [ 242 | ('index', 'appengine_fixture_loader', 243 | u'Python Boilerplate Documentation', 244 | [u'Ricardo Bánffy'], 1) 245 | ] 246 | 247 | # If true, show URL addresses after external links. 248 | #man_show_urls = False 249 | 250 | 251 | # -- Options for Texinfo output ---------------------------------------- 252 | 253 | # Grouping the document tree into Texinfo files. 
List of tuples 254 | # (source start file, target name, title, author, 255 | # dir menu entry, description, category) 256 | texinfo_documents = [ 257 | ('index', 'appengine_fixture_loader', 258 | u'Python Boilerplate Documentation', 259 | u'Ricardo Bánffy', 260 | 'appengine_fixture_loader', 261 | 'One line description of project.', 262 | 'Miscellaneous'), 263 | ] 264 | 265 | # Documents to append as an appendix to all manuals. 266 | #texinfo_appendices = [] 267 | 268 | # If false, no module index is generated. 269 | #texinfo_domain_indices = True 270 | 271 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 272 | #texinfo_show_urls = 'footnote' 273 | 274 | # If true, do not generate a @detailmenu in the "Top" node's menu. 275 | #texinfo_no_detailmenu = False 276 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. appengine_fixture_loader documentation master file, created by 2 | sphinx-quickstart on Tue Jul 9 22:26:36 2013. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to App Engine Fixture Loader's documentation! 7 | ====================================== 8 | 9 | Contents: 10 | 11 | .. 
toctree:: 12 | :maxdepth: 2 13 | 14 | readme 15 | installation 16 | usage 17 | contributing 18 | authors 19 | history 20 | 21 | Indices and tables 22 | ================== 23 | 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | At the command line:: 6 | 7 | $ easy_install appengine_fixture_loader 8 | 9 | Or, if you have virtualenvwrapper installed:: 10 | 11 | $ mkvirtualenv appengine_fixture_loader 12 | $ pip install appengine_fixture_loader 13 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. 
text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 
91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\appengine_fixture_loader.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\appengine_fixture_loader.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 
221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Usage 3 | ======== 4 | 5 | To use App Engine Fixture Loader in a project:: 6 | 7 | import appengine_fixture_loader 8 | -------------------------------------------------------------------------------- /resources/Makefile: -------------------------------------------------------------------------------- 1 | # Override VERSION if not defined 2 | VERSION?=$(shell resources/get_current_sdk_version.sh) 3 | 4 | # Override VENV if not defined 5 | VENV?=.env 6 | 7 | help: 8 | @echo "Please use \`make ' where is one of" 9 | @echo " venv to build the working virtual environment, and to install requirements for deployment" 10 | @echo " pycodestyle to run pycodestyle on the src and tests folders" 11 | @echo " pyflakes to run pyflakes on the src and tests folders" 12 | @echo " clean_venv to remove the created virtualenv folder" 13 | @echo " travis runs all tests (Travis CI hook)" 14 | 15 | # Consciously avoiding "all" target because we may want to use it 
for building 16 | # the actual product rather than a sane testing environment 17 | venv: virtualenv requirements appenginesdk 18 | 19 | # Creates the virtualenv, adds 'src' to the python path 20 | virtualenv: 21 | virtualenv $(CURDIR)/$(VENV) 22 | cp $(CURDIR)/resources/autogenerated $(CURDIR)/$(VENV)/lib/python2.7/site-packages/src.pth 23 | echo "$(CURDIR)/src/" >> $(CURDIR)/$(VENV)/lib/python2.7/site-packages/src.pth 24 | 25 | # Install all modules that will be used during development. 26 | requirements: 27 | $(CURDIR)/$(VENV)/bin/pip install --cache-dir $(CURDIR)/cache -r $(CURDIR)/resources/requirements.txt 28 | 29 | # Download the current Appengine SDK, unpack it in the virtualenv's 30 | # lib directory and add it and its patches to the gae.pth file that'll 31 | # set up the virtualenv's path. 32 | appenginesdk: virtualenv directories 33 | @wget -c https://storage.googleapis.com/appengine-sdks/featured/google_appengine_$(VERSION).zip -O $(CURDIR)/cache/google_appengine_$(VERSION).zip 34 | @unzip -q -o $(CURDIR)/cache/google_appengine_$(VERSION).zip -d $(CURDIR)/$(VENV)/lib 35 | @ln -svf $(CURDIR)/$(VENV)/lib/google_appengine/*.py $(CURDIR)/$(VENV)/bin/ 36 | @cp $(CURDIR)/resources/autogenerated $(CURDIR)/$(VENV)/lib/python2.7/site-packages/gae.pth 37 | @echo "$(CURDIR)/$(VENV)/lib/google_appengine/" >> $(CURDIR)/$(VENV)/lib/python2.7/site-packages/gae.pth 38 | @echo "import dev_appserver; dev_appserver.fix_sys_path()" >> $(CURDIR)/$(VENV)/lib/python2.7/site-packages/gae.pth 39 | 40 | # This creates directories that will be used during setup processes. 
41 | directories: 42 | mkdir -p $(CURDIR)/build $(CURDIR)/cache 43 | 44 | # A useful target for running pycodestyle your source tree 45 | pycodestyle: 46 | find $(CURDIR)/src/ -name *.py -exec pycodestyle {} \; 47 | find $(CURDIR)/tests/ -name *.py -exec pycodestyle {} \; 48 | 49 | # The same for pyflakes 50 | pyflakes: 51 | find $(CURDIR)/src/ -name *.py -exec pyflakes {} \; 52 | find $(CURDIR)/tests/ -name *.py -exec pyflakes {} \; 53 | 54 | clean_dirs: 55 | rm -rf $(CURDIR)/build/* 56 | 57 | clean_cache: 58 | rm -rf $(CURDIR)/cache/* 59 | 60 | # Also avoiding the "clean" target for the reasons described at the "venv" target 61 | # Deletes the virtualenv 62 | clean_venv: clean_dirs 63 | rm -rf $(CURDIR)/$(VENV) 64 | 65 | travis: venv 66 | .env/bin/nosetests 67 | -------------------------------------------------------------------------------- /resources/autogenerated: -------------------------------------------------------------------------------- 1 | # This file was auto-generated. Avoid changing it 2 | -------------------------------------------------------------------------------- /resources/get_current_sdk_version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | LAST_KNOWN_GOOD_VERSION="1.9.40" 4 | 5 | if [ -f $(which curl) ] 6 | then 7 | VERSION=$(curl -s https://storage.googleapis.com/appengine-sdks/featured/VERSION | grep release | awk -F '\"' '{print $2}') 8 | else 9 | VERSION=$(wget -q -O - https://storage.googleapis.com/appengine-sdks/featured/VERSION | grep release | awk -F '\"' '{print $2}') 10 | fi 11 | 12 | if [ VERSION="0.0.0" ] 13 | then 14 | VERSION=$LAST_KNOWN_GOOD_VERSION 15 | fi 16 | 17 | echo $VERSION 18 | -------------------------------------------------------------------------------- /resources/requirements.txt: -------------------------------------------------------------------------------- 1 | ipdb 2 | nose 3 | coverage 4 | pyflakes 5 | pycodestyle 6 | 
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | try: 5 | from setuptools import setup 6 | except ImportError: 7 | from distutils.core import setup 8 | 9 | from codecs import open 10 | 11 | readme = open('README.rst').read() 12 | history = open('HISTORY.rst').read().replace('.. :changelog:', '') 13 | 14 | requirements = [ 15 | # TODO: put package requirements here 16 | ] 17 | 18 | test_requirements = [ 19 | # TODO: put package test requirements here 20 | ] 21 | 22 | setup( 23 | name='Appengine-Fixture-Loader', 24 | version='0.1.9', 25 | description='Appengine fixture loader', 26 | long_description=readme + '\n\n' + history, 27 | author='Ricardo Bánffy', 28 | author_email='appengine-fixture-loader@autonomic.com.br', 29 | url='http://github.com/rbanffy/appengine-fixture-loader/', 30 | packages=[ 31 | 'appengine_fixture_loader', 32 | ], 33 | package_dir={'appengine_fixture_loader': 34 | 'appengine_fixture_loader'}, 35 | include_package_data=True, 36 | install_requires=requirements, 37 | license="Apache", 38 | zip_safe=False, 39 | keywords=['appengine', 'loader', 'fixture'], 40 | classifiers=[ 41 | 'Development Status :: 2 - Pre-Alpha', 42 | 'Environment :: Web Environment', 43 | 'Intended Audience :: Developers', 44 | 'License :: OSI Approved :: Apache Software License', 45 | 'Natural Language :: English', 46 | 'Operating System :: OS Independent', 47 | 'Programming Language :: Python :: 2.7', 48 | 'Programming Language :: Python', 49 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 50 | 'Topic :: Software Development :: Libraries :: Python Modules' 51 | ], 52 | 
test_suite='tests', 53 | tests_require=test_requirements 54 | ) 55 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rbanffy/appengine-fixture-loader/ae7dc44733ff0ad13411aec21f8badd2b95b90d2/tests/__init__.py -------------------------------------------------------------------------------- /tests/ancestor_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the one-level, multi-type loader 3 | """ 4 | 5 | import datetime 6 | import unittest 7 | 8 | # The test will error out if we can't import these items 9 | from google.appengine.ext import ndb 10 | from google.appengine.ext import testbed 11 | 12 | from appengine_fixture_loader.loader import load_fixture 13 | 14 | 15 | class Person(ndb.Model): 16 | """Our sample class""" 17 | first_name = ndb.StringProperty() 18 | last_name = ndb.StringProperty() 19 | born = ndb.DateTimeProperty() 20 | userid = ndb.IntegerProperty() 21 | thermostat_set_to = ndb.FloatProperty() 22 | snores = ndb.BooleanProperty() 23 | started_school = ndb.DateProperty() 24 | sleeptime = ndb.TimeProperty() 25 | favorite_movies = ndb.JsonProperty() 26 | processed = ndb.BooleanProperty(default=False) 27 | 28 | 29 | class Dog(ndb.Model): 30 | """Another sample class""" 31 | name = ndb.StringProperty() 32 | processed = ndb.BooleanProperty(default=False) 33 | 34 | 35 | class AncestorLoaderTest(unittest.TestCase): 36 | """Tests if we can load a JSON file containing __children__""" 37 | def setUp(self): 38 | self.testbed = testbed.Testbed() 39 | self.testbed.activate() 40 | self.testbed.init_datastore_v3_stub() 41 | self.testbed.init_memcache_stub() 42 | self.loaded_data = load_fixture('tests/ancestors_and_dogs.json', 43 | {'Person': Person, 'Dog': Dog}) 44 | 45 | def tearDown(self): 46 | self.testbed.deactivate() 47 | 48 | def 
test_loaded_count(self): 49 | """Make sure we got 3 total objects from the JSON file""" 50 | self.assertEqual(len(self.loaded_data), 3) 51 | 52 | def test_loaded(self): 53 | """Check whether the attributes we imported match the JSON contents""" 54 | # Test if John got in 55 | john = Person.query(Person.first_name == 'John').get() 56 | self.assertEqual(john.first_name, 'John') 57 | self.assertEqual(john.last_name, 'Doe') 58 | self.assertEqual(john.born, datetime.datetime(1968, 3, 3)) 59 | self.assertEqual(john.thermostat_set_to, 18.34) 60 | self.assertFalse(john.processed) 61 | 62 | def test_multiple_children(self): 63 | """Tests if multiple children were correctly imported""" 64 | 65 | # Get John 66 | john = Person.query(Person.first_name == 'John').get() 67 | 68 | # Test whether Fido got in 69 | fido = Dog.query(ancestor=john.key).get() 70 | self.assertEqual(fido.name, 'Fido') 71 | 72 | # Test whether Jane got in 73 | jane = Person.query(Person.first_name == 'Jane').get() 74 | self.assertEqual(jane.key.parent(), john.key) 75 | 76 | 77 | if __name__ == '__main__': 78 | unittest.main() 79 | -------------------------------------------------------------------------------- /tests/ancestors_and_dogs.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "__kind__": "Person", 4 | "born": "1968-03-03T00:00:00", 5 | "first_name": "John", 6 | "last_name": "Doe", 7 | "favorite_movies": [ 8 | "2001", 9 | "The Day The Earth Stood Still (1951)" 10 | ], 11 | "snores": false, 12 | "sleeptime": "23:00", 13 | "started_school": "1974-02-15", 14 | "thermostat_set_to": 18.34, 15 | "userid": 1, 16 | "__children__": [ 17 | { 18 | "__kind__": "Dog", 19 | "name": "Fido" 20 | }, 21 | { 22 | "__kind__": "Person", 23 | "first_name": "Jane" 24 | } 25 | ] 26 | } 27 | ] 28 | -------------------------------------------------------------------------------- /tests/hard_coded_id.json:
-------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "__kind__": "Person", 4 | "__id__": "jdoe", 5 | "born": "1968-03-03T00:00:00", 6 | "first_name": "John", 7 | "last_name": "Doe", 8 | "favorite_movies": [ 9 | "2001", 10 | "The Day The Earth Stood Still (1951)" 11 | ], 12 | "snores": false, 13 | "sleeptime": "23:00", 14 | "started_school": "1974-02-15", 15 | "thermostat_set_to": 18.34, 16 | "userid": 1 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /tests/hard_coded_id_test.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the one-level, multi-type loader 3 | """ 4 | 5 | import datetime 6 | import unittest 7 | 8 | # The test will error out if we can't import these items 9 | from google.appengine.ext import ndb 10 | from google.appengine.ext import testbed 11 | 12 | from appengine_fixture_loader.loader import load_fixture 13 | 14 | 15 | class Person(ndb.Model): 16 | """Our sample class""" 17 | first_name = ndb.StringProperty() 18 | last_name = ndb.StringProperty() 19 | born = ndb.DateTimeProperty() 20 | userid = ndb.IntegerProperty() 21 | thermostat_set_to = ndb.FloatProperty() 22 | snores = ndb.BooleanProperty() 23 | started_school = ndb.DateProperty() 24 | sleeptime = ndb.TimeProperty() 25 | favorite_movies = ndb.JsonProperty() 26 | processed = ndb.BooleanProperty(default=False) 27 | 28 | 29 | class AncestorLoaderTest(unittest.TestCase): 30 | """Tests if we can load a JSON file containing __children__""" 31 | def setUp(self): 32 | self.testbed = testbed.Testbed() 33 | self.testbed.activate() 34 | self.testbed.init_datastore_v3_stub() 35 | self.testbed.init_memcache_stub() 36 | self.loaded_data = load_fixture('tests/hard_coded_id.json', Person) 37 | 38 | def tearDown(self): 39 | self.testbed.deactivate() 40 | 41 | def test_loaded(self): 42 | """Check whether the attributes we imported match the JSON contents""" 43 | # 
Test if John got in 44 | john_key = ndb.Key('Person', 'jdoe') 45 | john = john_key.get() 46 | self.assertEqual(john.first_name, 'John') 47 | self.assertEqual(john.last_name, 'Doe') 48 | self.assertEqual(john.born, datetime.datetime(1968, 3, 3)) 49 | self.assertEqual(john.thermostat_set_to, 18.34) 50 | self.assertFalse(john.processed) 51 | 52 | 53 | if __name__ == '__main__': 54 | unittest.main() 55 | -------------------------------------------------------------------------------- /tests/multi_kind_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the one-level, multi-type loader 3 | """ 4 | 5 | import datetime 6 | import unittest 7 | 8 | # The test will error out if we can't import these items 9 | from google.appengine.ext import ndb 10 | from google.appengine.ext import testbed 11 | 12 | from appengine_fixture_loader.loader import load_fixture 13 | 14 | 15 | class Person(ndb.Model): 16 | """Our sample class""" 17 | first_name = ndb.StringProperty() 18 | last_name = ndb.StringProperty() 19 | born = ndb.DateTimeProperty() 20 | userid = ndb.IntegerProperty() 21 | thermostat_set_to = ndb.FloatProperty() 22 | snores = ndb.BooleanProperty() 23 | started_school = ndb.DateProperty() 24 | sleeptime = ndb.TimeProperty() 25 | favorite_movies = ndb.JsonProperty() 26 | processed = ndb.BooleanProperty(default=False) 27 | 28 | 29 | class Dog(ndb.Model): 30 | """Another sample class""" 31 | name = ndb.StringProperty() 32 | processed = ndb.BooleanProperty(default=False) 33 | 34 | 35 | class MultiLoaderTest(unittest.TestCase): 36 | """Tests if we can load a JSON file with more than one kind""" 37 | def setUp(self): 38 | self.testbed = testbed.Testbed() 39 | self.testbed.activate() 40 | self.testbed.init_datastore_v3_stub() 41 | self.testbed.init_memcache_stub() 42 | self.loaded_data = load_fixture('tests/persons_and_dogs.json', 43 | {'Person': Person, 'Dog': Dog}) 44 | 45 | def tearDown(self): 46 | self.testbed.deactivate() 
47 | 48 | def test_loaded_count(self): 49 | """Make sure we got 2 objects from the JSON file""" 50 | self.assertEqual(len(self.loaded_data), 2) 51 | 52 | def test_loaded(self): 53 | """Check whether the attributes we imported match the JSON contents""" 54 | # Test if the Person got in 55 | person = Person.query(Person.first_name == 'John').get() 56 | self.assertEqual(person.first_name, 'John') 57 | self.assertEqual(person.last_name, 'Doe') 58 | self.assertEqual(person.born, datetime.datetime(1968, 3, 3)) 59 | self.assertEqual(person.thermostat_set_to, 18.34) 60 | self.assertFalse(person.processed) 61 | 62 | # Test if the Dog got in 63 | dog = Dog.query(Dog.name == 'Fido').get() 64 | self.assertEqual(dog.name, 'Fido') 65 | 66 | 67 | class ProcessedMultiLoaderTest(unittest.TestCase): 68 | """Tests if we can load a JSON file and post-process it""" 69 | def setUp(self): 70 | 71 | def process(p): 72 | p.processed = True 73 | 74 | self.testbed = testbed.Testbed() 75 | self.testbed.activate() 76 | self.testbed.init_datastore_v3_stub() 77 | self.testbed.init_memcache_stub() 78 | self.loaded_data = load_fixture( 79 | 'tests/persons_and_dogs.json', 80 | {'Person': Person, 'Dog': Dog}, 81 | post_processor=process 82 | ) 83 | 84 | def tearDown(self): 85 | self.testbed.deactivate() 86 | 87 | def test_loaded_count(self): 88 | """Make sure we got 2 objects from the JSON file""" 89 | self.assertEqual(len(self.loaded_data), 2) 90 | 91 | def test_loaded_types(self): 92 | """Make sure all objects we loaded were processed""" 93 | self.assertTrue(all([p.processed for p in self.loaded_data])) 94 | 95 | 96 | if __name__ == '__main__': 97 | unittest.main() 98 | -------------------------------------------------------------------------------- /tests/multi_level_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the one-level, multi-type loader 3 | """ 4 | 5 | import datetime 6 | import unittest 7 | 8 | # The test will error out if we 
can't import these items 9 | from google.appengine.ext import ndb 10 | from google.appengine.ext import testbed 11 | 12 | from appengine_fixture_loader.loader import load_fixture 13 | 14 | 15 | class Person(ndb.Model): 16 | """Our sample class""" 17 | first_name = ndb.StringProperty() 18 | last_name = ndb.StringProperty() 19 | born = ndb.DateTimeProperty() 20 | userid = ndb.IntegerProperty() 21 | thermostat_set_to = ndb.FloatProperty() 22 | snores = ndb.BooleanProperty() 23 | started_school = ndb.DateProperty() 24 | sleeptime = ndb.TimeProperty() 25 | favorite_movies = ndb.JsonProperty() 26 | processed = ndb.BooleanProperty(default=False) 27 | appropriate_adult = ndb.KeyProperty() 28 | 29 | 30 | class Dog(ndb.Model): 31 | """Another sample class""" 32 | name = ndb.StringProperty() 33 | processed = ndb.BooleanProperty(default=False) 34 | owner = ndb.KeyProperty() 35 | 36 | 37 | class MultiLevelLoaderTest(unittest.TestCase): 38 | """Tests if we can load a JSON file with key-based hierarchies""" 39 | def setUp(self): 40 | self.testbed = testbed.Testbed() 41 | self.testbed.activate() 42 | self.testbed.init_datastore_v3_stub() 43 | self.testbed.init_memcache_stub() 44 | self.loaded_data = load_fixture('tests/persons_children_and_dogs.json', 45 | {'Person': Person, 'Dog': Dog}) 46 | 47 | def tearDown(self): 48 | self.testbed.deactivate() 49 | 50 | def test_loaded_count(self): 51 | """Make sure we got 7 total objects from the JSON file""" 52 | self.assertEqual(len(self.loaded_data), 7) 53 | 54 | def test_total_count(self): 55 | """Make sure we got 6 objects loaded""" 56 | self.assertEqual(Person.query().count(), 6) 57 | 58 | def test_loaded(self): 59 | """Check whether the attributes we imported match the JSON contents""" 60 | # Test if John got in 61 | john = Person.query(Person.first_name == 'John').get() 62 | self.assertEqual(john.first_name, 'John') 63 | self.assertEqual(john.last_name, 'Doe') 64 | self.assertEqual(john.born, datetime.datetime(1968, 3, 3)) 65 | 
self.assertEqual(john.thermostat_set_to, 18.34) 66 | self.assertFalse(john.processed) 67 | 68 | def test_single_children(self): 69 | """Tests if a single child was correctly imported""" 70 | 71 | # Get John 72 | john = Person.query(Person.first_name == 'John').get() 73 | 74 | # Test if Jane got in 75 | jane = Person.query(Person.appropriate_adult == john.key).get() 76 | self.assertEqual(jane.first_name, 'Jane') 77 | 78 | def test_multiple_children(self): 79 | """Tests if multiple children were correctly imported""" 80 | 81 | # Get Alice 82 | alice = Person.query(Person.first_name == 'Alice').get() 83 | self.assertEqual(alice.last_name, 'Schneier') 84 | 85 | # Get the good and evil twins 86 | self.assertEqual( 87 | Person.query(Person.appropriate_adult == alice.key).count(), 2) 88 | 89 | def test_child_of_a_different_type(self): 90 | """Tests a child record of a different kind""" 91 | charlie = Person.query(Person.first_name == 'Charlie').get() 92 | fido = Dog.query(Dog.owner == charlie.key).get() 93 | self.assertEqual(fido.name, 'Fido') 94 | 95 | 96 | if __name__ == '__main__': 97 | unittest.main() 98 | -------------------------------------------------------------------------------- /tests/persons.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "born": "1968-03-03T00:00:00", 4 | "first_name": "John", 5 | "last_name": "Doe", 6 | "favorite_movies": [ 7 | "2001", 8 | "The Day The Earth Stood Still (1951)" 9 | ], 10 | "snores": false, 11 | "sleeptime": "23:00", 12 | "started_school": "1974-02-15", 13 | "thermostat_set_to": 18.34, 14 | "userid": 1 15 | }, 16 | { 17 | "born": "1970-04-27T00:00:00", 18 | "first_name": "Jane", 19 | "last_name": "Doe", 20 | "favorite_movies": [ 21 | "2001", 22 | "Superman" 23 | ], 24 | "snores": false, 25 | "sleeptime": "22:30:30", 26 | "started_school": "1978-08-01", 27 | "thermostat_set_to": 23, 28 | "userid": 2 29 | }, 30 | { 31 | "born": "1999-09-19T00:00:00", 32 | "first_name": 
"Alice", 33 | "last_name": "Schneier", 34 | "favorite_movies": [ 35 | "2001", 36 | "Superman" 37 | ], 38 | "snores": true, 39 | "sleeptime": "22:00", 40 | "started_school": "1985-08-01", 41 | "thermostat_set_to": 18.34, 42 | "userid": 3 43 | }, 44 | { 45 | "born": "1980-05-25T00:00:00", 46 | "first_name": "Bob", 47 | "last_name": "Schneier", 48 | "favorite_movies": [ 49 | "2001", 50 | "Superman" 51 | ], 52 | "snores": true, 53 | "sleeptime": "22:00", 54 | "started_school": "1985-08-01", 55 | "thermostat_set_to": 18.34, 56 | "userid": -5 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- /tests/persons_and_dogs.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "__kind__": "Person", 4 | "born": "1968-03-03T00:00:00", 5 | "first_name": "John", 6 | "last_name": "Doe", 7 | "favorite_movies": [ 8 | "2001", 9 | "The Day The Earth Stood Still (1951)" 10 | ], 11 | "snores": false, 12 | "sleeptime": "23:00", 13 | "started_school": "1974-02-15", 14 | "thermostat_set_to": 18.34, 15 | "userid": 1 16 | }, 17 | { 18 | "__kind__": "Dog", 19 | "name": "Fido" 20 | } 21 | ] 22 | -------------------------------------------------------------------------------- /tests/persons_children_and_dogs.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "__kind__": "Person", 4 | "born": "1968-03-03T00:00:00", 5 | "first_name": "John", 6 | "last_name": "Doe", 7 | "favorite_movies": [ 8 | "2001", 9 | "The Day The Earth Stood Still (1951)" 10 | ], 11 | "snores": false, 12 | "sleeptime": "23:00", 13 | "started_school": "1974-02-15", 14 | "thermostat_set_to": 18.34, 15 | "userid": 1, 16 | "__children__appropriate_adult__": [ 17 | { 18 | "__kind__": "Person", 19 | "born": "1970-04-27T00:00:00", 20 | "first_name": "Jane", 21 | "last_name": "Doe", 22 | "favorite_movies": [ 23 | "2001", 24 | "Superman" 25 | ], 26 | "snores": false, 27 | "sleeptime": 
"22:30:30", 28 | "started_school": "1978-08-01", 29 | "thermostat_set_to": 23, 30 | "userid": 2, 31 | "__children__appropriate_adult__": [ 32 | { 33 | "__kind__": "Person", 34 | "born": "1980-05-25T00:00:00", 35 | "first_name": "Bob", 36 | "last_name": "Schneier", 37 | "favorite_movies": [ 38 | "2001", 39 | "Superman" 40 | ], 41 | "snores": true, 42 | "sleeptime": "22:00", 43 | "started_school": "1985-08-01", 44 | "thermostat_set_to": 18.34, 45 | "userid": 3 46 | } 47 | ] 48 | } 49 | ] 50 | }, 51 | { 52 | "__kind__": "Person", 53 | "born": "1999-09-19T00:00:00", 54 | "first_name": "Alice", 55 | "last_name": "Schneier", 56 | "favorite_movies": [ 57 | "2001", 58 | "Superman" 59 | ], 60 | "snores": true, 61 | "sleeptime": "22:00", 62 | "started_school": "1985-08-01", 63 | "thermostat_set_to": 18.34, 64 | "userid": 4, 65 | "__children__appropriate_adult__": [ 66 | { 67 | "__kind__": "Person", 68 | "born": "1980-05-25T00:00:00", 69 | "first_name": "Bob", 70 | "last_name": "Schneier", 71 | "favorite_movies": [ 72 | "2001", 73 | "Superman" 74 | ], 75 | "snores": true, 76 | "sleeptime": "22:00", 77 | "started_school": "1985-08-01", 78 | "thermostat_set_to": 18.34, 79 | "userid": 5 80 | }, 81 | { 82 | "__kind__": "Person", 83 | "born": "1980-05-25T00:00:00", 84 | "first_name": "Charlie", 85 | "last_name": "Schneier", 86 | "favorite_movies": [ 87 | "Twins", 88 | "Goodbye Gemini" 89 | ], 90 | "snores": true, 91 | "sleeptime": "22:30", 92 | "started_school": "1985-08-01", 93 | "thermostat_set_to": 18.34, 94 | "userid": 6, 95 | "__children__owner__": [ 96 | { 97 | "__kind__": "Dog", 98 | "name": "Fido" 99 | } 100 | ] 101 | } 102 | ] 103 | } 104 | ] 105 | -------------------------------------------------------------------------------- /tests/single_level_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the one-level loader 3 | """ 4 | 5 | import datetime 6 | import unittest 7 | 8 | # The test will error out if we can't 
import these items 9 | from google.appengine.ext import ndb 10 | from google.appengine.ext import testbed 11 | 12 | from appengine_fixture_loader.loader import load_fixture 13 | 14 | 15 | class Person(ndb.Model): 16 | """Our sample class""" 17 | first_name = ndb.StringProperty() 18 | last_name = ndb.StringProperty() 19 | born = ndb.DateTimeProperty() 20 | userid = ndb.IntegerProperty() 21 | thermostat_set_to = ndb.FloatProperty() 22 | snores = ndb.BooleanProperty() 23 | started_school = ndb.DateProperty() 24 | sleeptime = ndb.TimeProperty() 25 | favorite_movies = ndb.JsonProperty() 26 | processed = ndb.BooleanProperty(default=False) 27 | 28 | 29 | class LoaderTest(unittest.TestCase): 30 | """Tests if we can load a JSON file""" 31 | def setUp(self): 32 | self.testbed = testbed.Testbed() 33 | self.testbed.activate() 34 | self.testbed.init_datastore_v3_stub() 35 | self.testbed.init_memcache_stub() 36 | self.loaded_data = load_fixture('tests/persons.json', Person) 37 | 38 | def tearDown(self): 39 | self.testbed.deactivate() 40 | 41 | def test_loaded_count(self): 42 | """Make sure we got 4 objects from the JSON file""" 43 | self.assertEqual(len(self.loaded_data), 4) 44 | 45 | def test_loaded_types(self): 46 | """Make sure all objects we loaded are instances of Person""" 47 | self.assertTrue(all([type(p) == Person for p in self.loaded_data])) 48 | 49 | def test_loaded(self): 50 | """Check whether the attributes we imported match the JSON contents""" 51 | # Test if the first record got in 52 | person = Person.query(Person.first_name == 'John').get() 53 | self.assertEqual(person.first_name, 'John') 54 | self.assertEqual(person.last_name, 'Doe') 55 | self.assertEqual(person.born, datetime.datetime(1968, 3, 3)) 56 | self.assertEqual(person.thermostat_set_to, 18.34) 57 | self.assertFalse(person.processed) 58 | 59 | # Test for the third one; pass both filters as separate query arguments 60 | # (a Python `and` would silently drop the first condition) 61 | person = Person.query(Person.last_name == 'Schneier', 62 | Person.first_name == 'Alice').get() 63 |
self.assertEqual(person.first_name, 'Alice') 63 | self.assertEqual(person.last_name, 'Schneier') 64 | self.assertEqual(person.born, datetime.datetime(1999, 9, 19)) 65 | self.assertTrue(person.snores) 66 | self.assertFalse(person.processed) 67 | 68 | # Test for the last one 69 | person = Person.query( 70 | Person.born == datetime.datetime(1980, 5, 25, 0, 0, 0)).get() 71 | self.assertEqual(person.first_name, 'Bob') 72 | self.assertEqual(person.last_name, 'Schneier') 73 | self.assertEqual(person.born, datetime.datetime(1980, 5, 25)) 74 | self.assertFalse(person.processed) 75 | 76 | 77 | class ProcessedLoaderTest(unittest.TestCase): 78 | """Tests if we can load a JSON file and post-process it""" 79 | def setUp(self): 80 | 81 | def process(p): 82 | p.processed = True 83 | 84 | self.testbed = testbed.Testbed() 85 | self.testbed.activate() 86 | self.testbed.init_datastore_v3_stub() 87 | self.testbed.init_memcache_stub() 88 | self.loaded_data = load_fixture( 89 | 'tests/persons.json', 90 | Person, 91 | post_processor=process 92 | ) 93 | 94 | def tearDown(self): 95 | self.testbed.deactivate() 96 | 97 | def test_loaded_count(self): 98 | """Make sure we got 4 objects from the JSON file""" 99 | self.assertEqual(len(self.loaded_data), 4) 100 | 101 | def test_loaded_types(self): 102 | """Make sure all objects we loaded were processed""" 103 | self.assertTrue(all([p.processed for p in self.loaded_data])) 104 | 105 | 106 | class CountedProcessedLoaderTest(unittest.TestCase): 107 | """Tests if the processor is invoked once per loaded entity""" 108 | def setUp(self): 109 | 110 | self.counter = 0 111 | 112 | def counted_process(p): 113 | p.processed = True 114 | self.counter += 1 115 | 116 | self.testbed = testbed.Testbed() 117 | self.testbed.activate() 118 | self.testbed.init_datastore_v3_stub() 119 | self.testbed.init_memcache_stub() 120 | self.loaded_data = load_fixture( 121 | 'tests/persons.json', 122 | Person, 123 | post_processor=counted_process 124 | ) 125 | 126 | def tearDown(self):
127 | self.testbed.deactivate() 128 | 129 | def test_single_invoke(self): 130 | """Make sure processor is invoked once per imported entity""" 131 | self.assertEqual(self.counter, 4) 132 | 133 | 134 | if __name__ == '__main__': 135 | unittest.main() 136 | --------------------------------------------------------------------------------