├── .coveragerc ├── .gitattributes ├── .github └── stale.yml ├── .gitignore ├── .pylintrc ├── .travis.yml ├── LICENSE ├── README.rst ├── developing.md ├── docs ├── Makefile ├── source │ ├── api-creation.rst │ ├── client.rst │ ├── conf.py │ ├── customisation.rst │ ├── developing.rst │ ├── index.rst │ ├── metadata.rst │ ├── readme.inc │ └── welcome.rst └── sphinx.sh ├── notes.md ├── pyramid_jsonapi ├── __init__.py ├── authoriser.py ├── callbacks_doc.py ├── collection_view.py ├── db_query.py ├── endpoints.py ├── filters.py ├── http_query.py ├── metadata │ ├── JSONSchema │ │ └── __init__.py │ ├── OpenAPI │ │ ├── __init__.py │ │ ├── swagger-ui │ │ │ └── index.mako │ │ └── update-swagger-ui.sh │ └── __init__.py ├── permissions.py ├── resource.py ├── schema │ └── jsonapi-schema.json ├── serialiser.py ├── unit_tests.py ├── version.py └── workflow │ ├── __init__.py │ ├── loop │ ├── __init__.py │ ├── collection_get.py │ ├── collection_post.py │ ├── item_delete.py │ ├── item_get.py │ ├── item_patch.py │ ├── related_get.py │ ├── relationships_delete.py │ ├── relationships_get.py │ ├── relationships_patch.py │ └── relationships_post.py │ └── selectin │ ├── __init__.py │ └── collection_get.py ├── setup.cfg ├── setup.py ├── test_project ├── CHANGES.txt ├── MANIFEST.in ├── README.txt ├── development.ini ├── production.ini ├── run-app.py ├── setup.cfg ├── setup.py ├── test-alt-schema.json ├── test_project │ ├── __init__.py │ ├── models.py │ ├── models2.py │ ├── play_objects.py │ ├── query_tests.py │ ├── scripts │ │ ├── __init__.py │ │ └── initializedb.py │ ├── static │ │ ├── favicon.ico │ │ ├── footerbg.png │ │ ├── headerbg.png │ │ ├── ie6.css │ │ ├── middlebg.png │ │ ├── pylons.css │ │ ├── pyramid-small.png │ │ ├── pyramid.png │ │ └── transparent.gif │ ├── templates │ │ └── mytemplate.pt │ ├── test-openapi.json │ ├── test_data.json │ ├── test_data.py │ ├── tests.py │ └── views.py └── testing.ini └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | pyramid_jsonapi 4 | omit = 5 | pyramid_jsonapi/callbacks_doc.py 6 | pyramid_jsonapi/version.py 7 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | - security 9 | - helpwanted 10 | - enhancement 11 | - bug 12 | - "on hold" 13 | # Label to use when marking an issue as stale 14 | staleLabel: stale 15 | # Comment to post when marking an issue as stale. Set to `false` to disable 16 | markComment: > 17 | This issue has been automatically marked as stale because it has not had 18 | recent activity. It will be closed if no further activity occurs. Thank you 19 | for your contributions. 20 | # Comment to post when closing a stale issue. 
Set to `false` to disable 21 | closeComment: false 22 | 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | dist/ 13 | downloads/ 14 | sdist/ 15 | var/ 16 | *.egg-info/ 17 | .installed.cfg 18 | 19 | # PyInstaller 20 | # Usually these files are written by a python script from a template 21 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 22 | *.manifest 23 | *.spec 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .coverage.* 34 | .cache 35 | nosetests.xml 36 | coverage.xml 37 | *,cover 38 | 39 | # Translations 40 | *.mo 41 | *.pot 42 | 43 | # Django stuff: 44 | *.log 45 | 46 | # Sphinx documentation 47 | docs/_build/ 48 | docs/source/apidoc 49 | docs/source/pylint-badge.svg 50 | 51 | # PyBuilder 52 | target/ 53 | 54 | # Random play files 55 | splat* 56 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [TYPECHECK] 2 | ignored-modules=pyramid_settings_wrapper 3 | ignored-classes=pyramid_jsonapi.settings.Settings,pyramid_jsonapi.metadata.MetaData 4 | 5 | [MESSAGES CONTROL] 6 | disable=line-too-long,too-few-public-methods 7 | # 'id' is a JSONAPI specification field name 8 | good-names=i,j,k,ex,Run,_,id 9 | 10 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: bionic 2 | language: python 3 | python: 4 | - 3.7 5 | sudo: required 6 | services: 7 | - postgresql 8 | cache: 9 | pip: true 10 | install: 11 | - pip install tox 12 | script: 13 | # Call the pip installed tox, not the system one 14 | - $VIRTUAL_ENV/bin/tox 15 | deploy: 16 | provider: pypi 17 | user: "colinhiggs" 18 | on: 19 | tags: true 20 | password: 21 | secure: "YAq6KEOf4LnXgf9MkW0DRHLzxPGob5a6y/b38eQ/wD/KEL6rZjHvS92E1qEuTe+Et6RAoWz6e76j5ntQS9Qf7rJfIuxcqWl19ofXTmACcjHd6Evblqz8llGUlKTBKchkQ7yMm6tbkyYHLrUYgZPyr+0lB8G94wAh+qBd460kAMFEhPPOMCUDdtIA+jeU6dENFO9V/BFOBlOuoIEw6PLT96z 22 | gQCvUMza3O2Op4hT7zsyI9U/7jXM1RgV6/HF3TcyQTpTFBMoOeSrASTt6tT4LF7Ws3HL1HIe/Gy0smBseDP45csqP90uiqUvfv8P0lv31tgTrn2e5tjbTHhKeeN3th+hxyzwn2ss1XoH8gvcVdiq7+AKg41WXUgceAXsQfWXf53JjpmRzSw/fj5YBJ3x16gnVtUbZmDE9TnGneAyrwN6QTk56q2SUmDIj5 23 | ea+QCo3WnRcxHk0Ir0JUoBINW5HVoPlpT64eNmHftNiWSaDo7SaPfV1wIN9TbCJcm05IZtk3l8RiyWf/KQUGy3LRJRb1nZkq8dDPnPBcktWVRFjC55ibFEs6TtSXlX67PPVbNE8SU/bKOkzfcWiYT6bOXn9/t83LwoQj50sBq0052baI3l/fXQh8KfTCQflCsHQ8q4jU3mHIvbZSKzDQp3UNsi8cEM1CEw 24 | wFpYNnan9SIyD5HU=" 25 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | The pyramid-jsonapi project 2 | =========================== 3 | 4 | .. image:: https://img.shields.io/pypi/v/pyramid_jsonapi.svg 5 | :target: https://pypi.python.org/pypi/pyramid_jsonapi 6 | 7 | .. image:: https://img.shields.io/pypi/pyversions/pyramid_jsonapi.svg 8 | 9 | .. 
image:: https://travis-ci.org/colinhiggs/pyramid-jsonapi.svg?branch=master 10 | :target: https://travis-ci.org/colinhiggs/pyramid-jsonapi 11 | 12 | .. image:: https://coveralls.io/repos/github/colinhiggs/pyramid-jsonapi/badge.svg?branch=master 13 | :target: https://coveralls.io/github/colinhiggs/pyramid-jsonapi?branch=master 14 | 15 | .. image:: https://colinhiggs.github.io/pyramid-jsonapi/pylint-badge.svg 16 | 17 | Create a JSON-API (``_) standard API from a database using 18 | the sqlAlchemy ORM and pyramid framework. 19 | 20 | The core idea behind pyramid-jsonapi is to create a working JSON-API 21 | automatically, starting from the sort of ``models.py`` file shipped with a 22 | typical pyramid + sqlalchemy application. 23 | 24 | .. note:: 25 | 26 | The default branch of pyramid_jsonapi is now the 2.2 branch. 27 | 28 | Documentation 29 | ------------- 30 | 31 | Documentation is available at: ``_ 32 | 33 | Quick start 34 | =========== 35 | 36 | Installing `pyramid_jsonapi` 37 | ---------------------------- 38 | 39 | .. code-block:: bash 40 | 41 | pip install -i pyramid_jsonapi 42 | 43 | See :ref:`getting-started` for other installation options, including installing 44 | development versions. 45 | 46 | Auto-Creating an API 47 | -------------------- 48 | 49 | Declare your models somewhere using sqlalchemy's 50 | :func:`sqlalchemy.ext.declarative.declarative_base`. In this documentation we 51 | assume that you have done so in a file called ``models.py``: 52 | 53 | .. code-block:: python 54 | 55 | class Person(Base): 56 | __tablename__ = 'people' 57 | id = Column(BigInteger, primary_key=True, autoincrement=True) 58 | name = Column(Text) 59 | blogs = relationship('Blog', backref='owner') 60 | posts = relationship('Post', backref='author') 61 | 62 | # and the rest... 63 | 64 | If you are happy with the defaults, you can get away with the following 65 | additions to the standard pyramid alchemy scaffold's top level ``__init__.py``: 66 | 67 | .. code-block:: python 68 | 69 | import pyramid_jsonapi 70 | 71 | from . import models # Your models module. 72 | 73 | 74 | def main(global_config, **settings): 75 | 76 | # The usual stuff from the pyramid alchemy setup. 77 | config = Configurator(settings=settings) 78 | 79 | # pyramid_jsonapi uses the renderer labelled 'json'. As usual, if you have 80 | # any types to serialise that the default JSON renderer can't handle, you 81 | # must alter it. For example: 82 | # 83 | #renderer = JSON(sort_keys=True) 84 | #renderer.add_adapter(datetime.date, datetime_adapter) 85 | #config.add_renderer('json', renderer) 86 | 87 | # Instantiate a PyramidJSONAPI class instance. 88 | pj = pyramid_jsonapi.PyramidJSONAPI(config, models) 89 | 90 | # If you are using pyramid 1.7 or older, you will need to pass a third 91 | # argument to the constructor: a callable which accepts a CollectionView 92 | # instance as an argument and returns a sqlalchemy database session. 93 | # 94 | # For example: 95 | # pj = pyramid_jsonapi.PyramidJSONAPI( 96 | # config, models, lambda view: models.DBSession 97 | # ) 98 | 99 | # Create the routes and views automagically: 100 | pj.create_jsonapi_using_magic_and_pixie_dust() 101 | 102 | # Routes and views are added imperatively, so no need for a scan - unless 103 | # you have defined other routes and views declaratively. 104 | 105 | return config.make_wsgi_app() 106 | 107 | Or, without all the comments: 108 | 109 | .. code-block:: python 110 | 111 | import pyramid_jsonapi 112 | 113 | from . 
import models 114 | 115 | 116 | def main(global_config, **settings): 117 | config = Configurator(settings=settings) 118 | pj = pyramid_jsonapi.PyramidJSONAPI(config, models) 119 | pj.create_jsonapi_using_magic_and_pixie_dust() 120 | return config.make_wsgi_app() 121 | 122 | Yes, there really is a method called 123 | :func:`pyramid_jsonapi.PyramidJSONAPI.create_jsonapi_using_magic_and_pixie_dust`. No, you 124 | don't *have* to call it that. If you are feeling more sensible you can use the 125 | synonym :func:`pyramid_jsonapi.PyramidJSONAPI.create_jsonapi`. 126 | -------------------------------------------------------------------------------- /developing.md: -------------------------------------------------------------------------------- 1 | # Getting started 2 | 3 | ## Clone the repo: 4 | Although you've quite possibly already done that if you're here... 5 | ```bash 6 | git clone https://github.com/colinhiggs/pyramid-jsonapi.git 7 | ``` 8 | 9 | ## Install `tox` 10 | 11 | You need to install tox - either system-wide, or in it's own virtualenv: 12 | 13 | ```bash 14 | python3 -mvenv toxenv 15 | toxenv/bin/pip install tox 16 | ``` 17 | 18 | *Note*: This virtualenv only exists to deliver `tox`, *NOT* for development. 19 | 20 | 21 | 22 | ## Run `tox` 23 | 24 | `tox` creates it's own virtualenvs for testing in `.tox/` which can be used for code testing and development. 25 | These contain all of the dependencies for both the project and testing, as well as the local `pyramid-jsonapi` 26 | 27 | ```bash 28 | toxenv/bin/tox 29 | ``` 30 | 31 | Hopefully the tests all passed! 32 | 33 | 34 | ## Try it Out 35 | 36 | Begin by sourcing the tox virtualenv: 37 | 38 | ```bash 39 | source .tox/py3/bin/activate` 40 | ``` 41 | 42 | ### Create a test database 43 | 44 | Mumble mumble hand-wave... poke postgresql to create a new test database owned by an appropriate role. 45 | 46 | ### Edit development.ini to match database and role details 47 | 48 | The test project comes with the following sqlalchemy url: 49 | ```ini 50 | sqlalchemy.url = postgresql://test:test@localhost/test 51 | ``` 52 | which assumes there is a database called test owned by a user called test with password test. Highly imaginative stuff. 53 | 54 | ### Start a server running 55 | ```bash 56 | pserve test_project/development.ini 57 | ``` 58 | 59 | ### Populate with test data 60 | I use httpie [http://httpie.org/](http://httpie.org/) for manual poking and testing. Installing it gives you the command `http`. 61 | 62 | development.ini turns on certain debug features. One of them is to generate some database manipulating endpoints. 63 | 64 | To populate: 65 | ```bash 66 | http http://localhost:6543/debug/populate 67 | ``` 68 | 69 | There are also 'drop' to drop all tables and 'reset' to do a drop and then populate. 
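Assuming the other debug endpoints follow the same path pattern as `populate` (an assumption based on the route above; the exact prefix comes from the debug settings in `development.ini`), they can be called the same way:

```bash
# Assumed endpoints, mirroring /debug/populate:
http http://localhost:6543/debug/drop    # drop all tables
http http://localhost:6543/debug/reset   # drop, then populate again
```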
70 | 71 | The test data is defined in `test_project/test_project/test_data.json` 72 | 73 | ### Ask for something via the API 74 | ```bash 75 | http --verbose GET http://localhost:6543/api/people/1 76 | ``` 77 | 78 | which results in some http conversation: 79 | 80 | ``` 81 | GET /people/1 HTTP/1.1 82 | Accept: */* 83 | Accept-Encoding: gzip, deflate, compress 84 | Host: localhost:6543 85 | User-Agent: HTTPie/0.8.0 86 | 87 | 88 | 89 | HTTP/1.1 200 OK 90 | Content-Length: 1236 91 | Content-Type: application/vnd.api+json; charset=UTF-8 92 | Date: Fri, 03 Jun 2016 12:07:09 GMT 93 | Server: waitress 94 | ``` 95 | 96 | and the following JSON (listed here in a separate block so it will be coloured): 97 | 98 | ```json 99 | { 100 | "data": { 101 | "attributes": { 102 | "name": "alice" 103 | }, 104 | "id": "1", 105 | "links": { 106 | "self": "http://localhost:6543/people/1" 107 | }, 108 | "relationships": { 109 | "blogs": { 110 | "data": [ 111 | { 112 | "id": "1", 113 | "type": "blogs" 114 | }, 115 | { 116 | "id": "2", 117 | "type": "blogs" 118 | } 119 | ], 120 | "links": { 121 | "related": "http://localhost:6543/people/1/blogs", 122 | "self": "http://localhost:6543/people/1/relationships/blogs" 123 | }, 124 | "meta": { 125 | "direction": "ONETOMANY", 126 | "results": { 127 | "available": 2, 128 | "limit": 10, 129 | "returned": 2 130 | } 131 | } 132 | }, 133 | "comments": { 134 | "data": [ 135 | { 136 | "id": "1", 137 | "type": "comments" 138 | }, 139 | { 140 | "id": "3", 141 | "type": "comments" 142 | } 143 | ], 144 | "links": { 145 | "related": "http://localhost:6543/people/1/comments", 146 | "self": "http://localhost:6543/people/1/relationships/comments" 147 | }, 148 | "meta": { 149 | "direction": "ONETOMANY", 150 | "results": { 151 | "available": 2, 152 | "limit": 10, 153 | "returned": 2 154 | } 155 | } 156 | }, 157 | "posts": { 158 | "data": [ 159 | { 160 | "id": "1", 161 | "type": "posts" 162 | }, 163 | { 164 | "id": "2", 165 | "type": "posts" 166 | }, 167 | { 168 | "id": "3", 169 | "type": "posts" 170 | } 171 | ], 172 | "links": { 173 | "related": "http://localhost:6543/people/1/posts", 174 | "self": "http://localhost:6543/people/1/relationships/posts" 175 | }, 176 | "meta": { 177 | "direction": "ONETOMANY", 178 | "results": { 179 | "available": 3, 180 | "limit": 10, 181 | "returned": 3 182 | } 183 | } 184 | } 185 | }, 186 | "type": "people" 187 | }, 188 | "links": { 189 | "self": "http://localhost:6543/people/1" 190 | }, 191 | "meta": { 192 | "debug": { 193 | "accept_header": {}, 194 | "atts": { 195 | "name": null 196 | }, 197 | "includes": {}, 198 | "qinfo_page": {} 199 | } 200 | } 201 | } 202 | ``` 203 | That ugly debug information up there should be going away soon. 204 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | GHBUILDDIR = ../../pyramid-jsonapi-gh 10 | 11 | # User-friendly check for sphinx-build 12 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 13 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 14 | endif 15 | 16 | # Internal variables. 17 | PAPEROPT_a4 = -D latex_paper_size=a4 18 | PAPEROPT_letter = -D latex_paper_size=letter 19 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 20 | # the i18n builder cannot share the environment and doctrees with the others 21 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 22 | 23 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 24 | 25 | help: 26 | @echo "Please use \`make ' where is one of" 27 | @echo " gh to make HTML in $(GHBUILDDIR)" 28 | @echo " html to make standalone HTML files" 29 | @echo " dirhtml to make HTML files named index.html in directories" 30 | @echo " singlehtml to make a single large HTML file" 31 | @echo " pickle to make pickle files" 32 | @echo " json to make JSON files" 33 | @echo " htmlhelp to make HTML files and a HTML help project" 34 | @echo " qthelp to make HTML files and a qthelp project" 35 | @echo " devhelp to make HTML files and a Devhelp project" 36 | @echo " epub to make an epub" 37 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 38 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 39 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 40 | @echo " text to make text files" 41 | @echo " man to make manual pages" 42 | @echo " texinfo to make Texinfo files" 43 | @echo " info to make Texinfo files and run them through makeinfo" 44 | @echo " gettext to make PO message catalogs" 45 | @echo " changes to make an overview of all changed/added/deprecated items" 46 | @echo " xml to make Docutils-native XML files" 47 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 48 | @echo " linkcheck to check all external links for integrity" 49 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | gh: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(GHBUILDDIR) 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | html: 60 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 63 | 64 | dirhtml: 65 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 66 | @echo 67 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 68 | 69 | singlehtml: 70 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 71 | @echo 72 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 73 | 74 | pickle: 75 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 76 | @echo 77 | @echo "Build finished; now you can process the pickle files." 78 | 79 | json: 80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 81 | @echo 82 | @echo "Build finished; now you can process the JSON files." 83 | 84 | htmlhelp: 85 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 86 | @echo 87 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 88 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
89 | 90 | qthelp: 91 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 92 | @echo 93 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 94 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 95 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyramid-jsonapi.qhcp" 96 | @echo "To view the help file:" 97 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyramid-jsonapi.qhc" 98 | 99 | devhelp: 100 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 101 | @echo 102 | @echo "Build finished." 103 | @echo "To view the help file:" 104 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pyramid-jsonapi" 105 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyramid-jsonapi" 106 | @echo "# devhelp" 107 | 108 | epub: 109 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 110 | @echo 111 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 112 | 113 | latex: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo 116 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 117 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 118 | "(use \`make latexpdf' here to do that automatically)." 119 | 120 | latexpdf: 121 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 122 | @echo "Running LaTeX files through pdflatex..." 123 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 124 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 125 | 126 | latexpdfja: 127 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 128 | @echo "Running LaTeX files through platex and dvipdfmx..." 129 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 130 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 131 | 132 | text: 133 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 134 | @echo 135 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 136 | 137 | man: 138 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 139 | @echo 140 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 141 | 142 | texinfo: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo 145 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 146 | @echo "Run \`make' in that directory to run these through makeinfo" \ 147 | "(use \`make info' here to do that automatically)." 148 | 149 | info: 150 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 151 | @echo "Running Texinfo files through makeinfo..." 152 | make -C $(BUILDDIR)/texinfo info 153 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 154 | 155 | gettext: 156 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 157 | @echo 158 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 159 | 160 | changes: 161 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 162 | @echo 163 | @echo "The overview file is in $(BUILDDIR)/changes." 164 | 165 | linkcheck: 166 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 167 | @echo 168 | @echo "Link check complete; look for any errors in the above output " \ 169 | "or in $(BUILDDIR)/linkcheck/output.txt." 170 | 171 | doctest: 172 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 173 | @echo "Testing of doctests in the sources finished, look at the " \ 174 | "results in $(BUILDDIR)/doctest/output.txt." 
175 | 176 | xml: 177 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 178 | @echo 179 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 180 | 181 | pseudoxml: 182 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 183 | @echo 184 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 185 | -------------------------------------------------------------------------------- /docs/source/api-creation.rst: -------------------------------------------------------------------------------- 1 | .. _getting-started: 2 | 3 | Getting Started 4 | ================ 5 | 6 | Installation 7 | ------------ 8 | 9 | * Stable releases are uploaded to pypi: 10 | ``_. You 11 | can install it in the usual way: 12 | 13 | .. code-block:: bash 14 | 15 | pip install -i pyramid_jsonapi 16 | 17 | * Development releases are also uploaded to pypi. These have versions with 18 | '.devN' appended, where 'N' is the number of commits since the stable tag. You 19 | can install the latest one (perhaps into a virtualenv for play purposes) with 20 | 21 | .. code-block:: bash 22 | 23 | pip install --pre -i pyramid_jsonapi 24 | 25 | * See the :ref:`developing` documentation for details. 26 | 27 | * Since pyramid_jsonapi is pure python, You can download the latest code from 28 | ``_ and add the directory you 29 | downloaded/cloned to to your PYTHONPATH. 30 | 31 | Generating an API From Your Models 32 | ---------------------------------- 33 | 34 | First import the ``pyramid_jsonapi`` module and any model classes or modules which 35 | you would like to expose as API collection endpoints. In your application's 36 | ``__init__.py``: 37 | 38 | .. code-block:: python 39 | 40 | import pyramid_jsonapi 41 | from . import models 42 | 43 | Then instantiate an api object: 44 | 45 | .. code-block:: python 46 | 47 | pj_api = pyramid_jsonapi.PyramidJSONAPI(config, models, [get_db_session]) 48 | 49 | This is the class that encapsulates a whole API representing a set of models. 50 | The constructor has two mandatory and one optional arguments: 51 | 52 | * ``config`` is the usual Configurator object used in pyramid. 53 | 54 | * ``models`` can either be a module (as in the example above) defining classes 55 | which inherit from :py:func:`declarative_base` or an iterable of such classes. 56 | 57 | * ``get_dbsession`` (optional) should be a 58 | callable which accepts an instance of 59 | :class:`pyramid_jsonapi.CollectionViewBase` and returns a 60 | :class:`sqlalchemy.orm.session.Session` (or an equivalent, like a 61 | :func:`sqlalchemy.orm.scoped_session`) 62 | 63 | Once you have an instance of ``PyramidJSONAPI`` you instruct it to build 64 | endpoints (routes and views) with the method 65 | ``api.create_jsonapi_using_magic_and_pixie_dust()`` (or ``api.create_jsonapi()``). This 66 | is deliberately a two step affair to give you the chance to manipulate certain 67 | things (like the list of available endpoints) before the endpoints are 68 | constructed: 69 | 70 | .. code-block:: python 71 | 72 | pj_api = pyramid_jsonapi.PyramidJSONAPI(config, models) 73 | 74 | # Do something here like add a view for OPTIONS requests. 75 | 76 | pj_api.create_jsonapi_using_magic_and_pixie_dust() 77 | 78 | Auto-Create Assumptions 79 | ----------------------- 80 | #. Your model classes all inherit from a base class returned by sqlalchemy's 81 | ``declarative-base()``. 82 | 83 | #. Each model has a single primary_key column. This will be auto-detected and 84 | stored in ``__pyramid_jsonapi__`` dict attr in the model. 
85 | 86 | #. Use a separate primary key for association objects rather than the 87 | composite key defined by the left and right referenced foreign keys. 88 | 89 | #. You are happy to give your collection end-points the same name as the 90 | corresponding database table (can be overridden). 91 | 92 | #. You have defined any relationships to exposed via the API using 93 | ``sqlalchemy.orm.relationship()`` (or ``backref()``). 94 | 95 | #. You are happy to expose any so defined relationship via a relationship URL. 96 | 97 | #. API endpoints will be provided at ``/api/...`` by default. 98 | 99 | #. Metadata endpoints will be provided at ``/metadata/...`` by default. 100 | 101 | Some of those behaviours can be adjusted, see :ref:`customisation`. 102 | 103 | Trying Your API Out 104 | ------------------- 105 | 106 | You should now have a working JSON-API. A quick test. The following assumes that 107 | you have already created and set up a pyramid project in development mode 108 | (``python setup.py develop`` in pyramid 1.6, ``pip install -e`` in pyramid 1.7). 109 | 110 | Make sure you have activated your virtualenv: 111 | 112 | .. code-block:: bash 113 | 114 | $ source env/bin/activate 115 | 116 | Start the server: 117 | 118 | .. code-block:: bash 119 | 120 | $ pserve your_project/development.ini 121 | 122 | Assuming you have a collection named 'people' and are using the rather lovely 123 | `httpie `_ to test: 124 | 125 | .. code-block:: bash 126 | 127 | $ http http://localhost:6543/api/people 128 | 129 | HTTP/1.1 200 OK 130 | Content-Length: 1387 131 | Content-Type: application/vnd.api+json; charset=UTF-8 132 | Date: Fri, 28 Aug 2015 20:22:46 GMT 133 | Server: waitress 134 | 135 | .. code-block:: json 136 | 137 | { 138 | "data": [ 139 | { 140 | "type": "people", 141 | "id": "1", 142 | "attributes": { 143 | "name": "alice" 144 | }, 145 | "links": { 146 | "self": "http://localhost:6543/api/people/1" 147 | }, 148 | "relationships": { 149 | "": { 150 | "data": {"type": "", "id": ""} 151 | } 152 | } 153 | }, 154 | {""} 155 | ] 156 | } 157 | 158 | 159 | See ``test_project/test_project/__init__.py`` for a fully working 160 | ``__init__.py`` file. 161 | 162 | You don't need a ``views.py`` unless you have some other routes and views. 163 | 164 | There's also some metadata available at ``http://localhost:6543/metadata``. 165 | pyramid_jsonapi currently includes metadata modules to produce JSONSchema and 166 | OpenAPI/Swagger. See the :ref:`metadata` section. 167 | 168 | The following will fetch the JSONSchema for a successful response to a GET on the 169 | people endpoint: 170 | 171 | .. code-block:: bash 172 | 173 | $ http http://localhost:6543/metadata/JSONSchema/endpoint/people?method=get&direction=response&code=200 174 | 175 | .. code-block:: json 176 | 177 | { 178 | "type": "object", 179 | "additionalProperties": false, 180 | "properties": { 181 | "meta": { 182 | "$ref": "#/definitions/meta" 183 | }, 184 | "included": { 185 | "type": "array", 186 | "uniqueItems": true, 187 | "description": "To reduce the number of HTTP requests, servers **MAY** allow responses that include related resources along with the requested primary resources. 
Such responses are called \"compound documents\".", 188 | "items": { 189 | "$ref": "#/definitions/resource" 190 | } 191 | }, 192 | "jsonapi": { 193 | "$ref": "#/definitions/jsonapi" 194 | }, 195 | "data": { 196 | "$ref": "#/definitions/people_data" 197 | }, 198 | "links": { 199 | "allOf": [ 200 | { 201 | "$ref": "#/definitions/links" 202 | }, 203 | { 204 | "$ref": "#/definitions/pagination" 205 | } 206 | ], 207 | "description": "Link members related to the primary data." 208 | } 209 | }, 210 | "required": [ 211 | "data" 212 | ] 213 | } 214 | -------------------------------------------------------------------------------- /docs/source/client.rst: -------------------------------------------------------------------------------- 1 | .. _client: 2 | 3 | Consuming the API from the Client End 4 | ===================================== 5 | 6 | GET-ing Resources 7 | -------------------- 8 | 9 | A Collection 10 | ~~~~~~~~~~~~ 11 | 12 | .. code-block:: bash 13 | 14 | $ http GET http://localhost:6543/api/posts 15 | 16 | 17 | .. code-block:: json 18 | 19 | { 20 | "data": [ 21 | { 22 | "type": "posts", 23 | "id": "1", 24 | "attributes": { 25 | "content": "something insightful", 26 | "published_at": "2015-01-01T00:00:00", 27 | "title": "post1: alice.main" 28 | }, 29 | "links": { 30 | "self": "http://localhost:6543/api/posts/1" 31 | }, 32 | "relationships": { 33 | "author": { 34 | "data": { 35 | "id": "1", 36 | "type": "people" 37 | }, 38 | "links": { 39 | "related": "http://localhost:6543/api/posts/1/author", 40 | "self": "http://localhost:6543/api/posts/1/relationships/author" 41 | }, 42 | "meta": { 43 | "direction": "MANYTOONE", 44 | "results": {} 45 | } 46 | }, 47 | "blog": { 48 | "data": { 49 | "id": "1", 50 | "type": "blogs" 51 | }, 52 | "links": { 53 | "related": "http://localhost:6543/api/posts/1/blog", 54 | "self": "http://localhost:6543/api/posts/1/relationships/blog" 55 | }, 56 | "meta": { 57 | "direction": "MANYTOONE", 58 | "results": {} 59 | } 60 | }, 61 | "comments": { 62 | "data": [], 63 | "links": { 64 | "related": "http://localhost:6543/api/posts/1/comments", 65 | "self": "http://localhost:6543/api/posts/1/relationships/comments" 66 | }, 67 | "meta": { 68 | "direction": "ONETOMANY", 69 | "results": { 70 | "available": 0, 71 | "limit": 10, 72 | "returned": 0 73 | } 74 | } 75 | } 76 | } 77 | }, 78 | "... 5 more results ..." 79 | ], 80 | "links": { 81 | "first": "http://localhost:6543/api/posts?sort=id&page%5Boffset%5D=0", 82 | "last": "http://localhost:6543/api/posts?sort=id&page%5Boffset%5D=0", 83 | "self": "http://localhost:6543/api/posts" 84 | }, 85 | "meta": { 86 | "results": { 87 | "available": 6, 88 | "limit": 10, 89 | "offset": 0, 90 | "returned": 6 91 | } 92 | } 93 | } 94 | 95 | 96 | Note that we have: 97 | 98 | * ``data`` which is an array of posts objects, each with: 99 | 100 | * a ``type``, which is the collection name 101 | 102 | * an ``id``, which is the value of the primary key column (which may or may not be called ``id``) 103 | 104 | * ``attributes``, as expected 105 | 106 | * a ``links`` object with: 107 | 108 | * a ``self`` link 109 | 110 | * relationship objects for each relationship with: 111 | 112 | * ``data`` with resource identifiers for related objects 113 | 114 | * ``self`` and ``related`` links 115 | 116 | * some other information about the relationship in ``meta`` 117 | 118 | * ``links`` with: 119 | 120 | * ``self`` and 121 | 122 | * ``pagination`` links 123 | 124 | * ``meta`` with: 125 | 126 | * some extra information about the number of results returned. 
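The ``related`` links in each relationship can be dereferenced with a normal
GET. For example, to fetch the author of the first post shown above (the URL
is taken straight from its ``relationships.author.links.related`` entry):

.. code-block:: bash

    $ http GET http://localhost:6543/api/posts/1/author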
127 | 128 | A Single Resource 129 | ~~~~~~~~~~~~~~~~~ 130 | 131 | .. code-block:: bash 132 | 133 | $ http GET http://localhost:6543/api/posts/1 134 | 135 | Returns a single resource object in ``data`` and no pagination links. 136 | 137 | .. code-block:: json 138 | 139 | { 140 | "data": { 141 | "type": "posts", 142 | "id": "1", 143 | "attributes": { 144 | "content": "something insightful", 145 | "published_at": "2015-01-01T00:00:00", 146 | "title": "post1: alice.main" 147 | }, 148 | "links": { 149 | "self": "http://localhost:6543/api/posts/1" 150 | }, 151 | "relationships": { 152 | "author": { 153 | "data": { 154 | "id": "1", 155 | "type": "people" 156 | }, 157 | "links": { 158 | "related": "http://localhost:6543/api/posts/1/author", 159 | "self": "http://localhost:6543/api/posts/1/relationships/author" 160 | }, 161 | "meta": { 162 | "direction": "MANYTOONE", 163 | "results": {} 164 | } 165 | }, 166 | "blog": { 167 | "data": { 168 | "id": "1", 169 | "type": "blogs" 170 | }, 171 | "links": { 172 | "related": "http://localhost:6543/api/posts/1/blog", 173 | "self": "http://localhost:6543/api/posts/1/relationships/blog" 174 | }, 175 | "meta": { 176 | "direction": "MANYTOONE", 177 | "results": {} 178 | } 179 | }, 180 | "comments": { 181 | "data": [], 182 | "links": { 183 | "related": "http://localhost:6543/api/posts/1/comments", 184 | "self": "http://localhost:6543/api/posts/1/relationships/comments" 185 | }, 186 | "meta": { 187 | "direction": "ONETOMANY", 188 | "results": { 189 | "available": 0, 190 | "limit": 10, 191 | "returned": 0 192 | } 193 | } 194 | } 195 | } 196 | }, 197 | "links": { 198 | "self": "http://localhost:6543/api/posts/1" 199 | }, 200 | "meta": {} 201 | } 202 | 203 | Sparse Fieldsets 204 | ~~~~~~~~~~~~~~~~ 205 | 206 | We can ask only for certain fields (attributes and relationships are 207 | collectively known as fields). 208 | 209 | Use the ``fields`` parameter, parameterized by collection name 210 | (fields[collection]), with the value set as a comma separated list of field 211 | names. 212 | 213 | So, to return only the title attribute and author relationship of each post: 214 | 215 | .. code-block:: bash 216 | 217 | $ http GET http://localhost:6543/api/posts?fields[posts]=title,author 218 | 219 | The resulting json has a ``data`` element with a list of objects something like 220 | this: 221 | 222 | .. code-block:: json 223 | 224 | { 225 | "attributes": { 226 | "title": "post1: bob.second" 227 | }, 228 | "id": "6", 229 | "links": { 230 | "self": "http://localhost:6543/api/posts/6" 231 | }, 232 | "relationships": { 233 | "author": { 234 | "data": { 235 | "id": "2", 236 | "type": "people" 237 | }, 238 | "links": { 239 | "related": "http://localhost:6543/api/posts/6/author", 240 | "self": "http://localhost:6543/api/posts/6/relationships/author" 241 | }, 242 | "meta": { 243 | "direction": "MANYTOONE", 244 | "results": {} 245 | } 246 | } 247 | }, 248 | "type": "posts" 249 | } 250 | 251 | Sorting 252 | ~~~~~~~ 253 | 254 | You can specify a sorting attribute and order with the sort query parameter. 255 | 256 | Sort posts by title: 257 | 258 | .. code-block:: bash 259 | 260 | $ http GET http://localhost:6543/api/posts?sort=title 261 | 262 | and in reverse: 263 | 264 | .. code-block:: bash 265 | 266 | $ http GET http://localhost:6543/api/posts?sort=-title 267 | 268 | Sorting by multiple attributes (e.g. ``sort=title,content``) and sorting by attributes of related objects (`sort=author.name`) are supported. 269 | 270 | A sort on id is assumed unless the sort parameter is specified. 
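Putting those together, sorting posts by author name and then by title looks
like this (the syntax above implies this combination is accepted):

.. code-block:: bash

    $ http GET http://localhost:6543/api/posts?sort=author.name,title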
271 | 272 | Pagination 273 | ~~~~~~~~~~ 274 | 275 | You can specify the pagination limit and offset: 276 | 277 | .. code-block:: bash 278 | 279 | $ http GET http://localhost:6543/api/posts?fields[posts]=title\&page[limit]=2\&page[offset]=2 280 | 281 | We asked for only the ``title`` field above so that the results would be more 282 | compact... 283 | 284 | .. code-block:: json 285 | 286 | { 287 | "data": [ 288 | { 289 | "attributes": { 290 | "title": "post1: alice.second" 291 | }, 292 | "id": "3", 293 | "links": { 294 | "self": "http://localhost:6543/api/posts/3" 295 | }, 296 | "relationships": {}, 297 | "type": "posts" 298 | }, 299 | { 300 | "attributes": { 301 | "title": "post1: bob.main" 302 | }, 303 | "id": "4", 304 | "links": { 305 | "self": "http://localhost:6543/api/posts/4" 306 | }, 307 | "relationships": {}, 308 | "type": "posts" 309 | } 310 | ], 311 | "links": { 312 | "first": "http://localhost:6543/api/posts?page%5Blimit%5D=2&sort=id&page%5Boffset%5D=0", 313 | "last": "http://localhost:6543/api/posts?page%5Blimit%5D=2&sort=id&page%5Boffset%5D=4", 314 | "next": "http://localhost:6543/api/posts?page%5Blimit%5D=2&sort=id&page%5Boffset%5D=4", 315 | "prev": "http://localhost:6543/api/posts?page%5Blimit%5D=2&sort=id&page%5Boffset%5D=0", 316 | "self": "http://localhost:6543/api/posts?fields[posts]=title&page[limit]=2&page[offset]=2" 317 | }, 318 | "meta": { 319 | "results": { 320 | "available": 6, 321 | "limit": 2, 322 | "offset": 2, 323 | "returned": 2 324 | } 325 | } 326 | } 327 | 328 | There's a default page limit which is used if the limit is not specified and a 329 | maximum limit that the server will allow. Both of these can be set in the ini 330 | file. 331 | 332 | Filtering 333 | ~~~~~~~~~ 334 | 335 | The JSON API spec doesn't say much about filtering syntax, other than that it 336 | should use the parameter key ``filter``. There are multiple filtering syntaxes 337 | available in pyramid_jsonapi. 338 | 339 | The first is simple filtering and has been 340 | available since the first release. It uses the following syntax: 341 | 342 | .. code:: 343 | 344 | filter[:]= 345 | 346 | where: 347 | 348 | * ``attribute_spec`` is either a direct attribute name or a dotted path to an 349 | attribute via relationships (only one level of relationships is currently supported). 350 | 351 | * ``operator`` is one of the list of supported operators (:ref:`search_filter_operators`). 352 | 353 | * ``value`` is the value to match on. 354 | 355 | This is simple and reasonably effective. It's a little awkward on readability though. If you feel that you have a syntax that is more readable, more powerful, easier to parse or has some other advantage, let me know - I'd be interested in any thoughts. 356 | 357 | Search operators in sqlalchemy (called column comparators) must be registered before they are treated as valid for use in json-api filters. The procedure for registering them, and the list of those registered by default can be found in :ref:`search_filter_operators`. 358 | 359 | To specify another search filter syntax use the syntax name with a ``*`` in 360 | front in the square brackets after ``filter``, like 361 | ``filter[*rql]=some rql filter``. 362 | 363 | Filter languages available: 364 | 365 | * RQL defined here ``_ as implemented in 366 | rqlalchemy ``_. 367 | 368 | Filter Examples 369 | ^^^^^^^^^^^^^^^ 370 | 371 | **Simple**: 372 | 373 | Find all the people with name 'alice': 374 | 375 | .. 
code-block:: bash 376 | 377 | http GET http://localhost:6543/api/people?filter[name:eq]=alice 378 | 379 | Find all the posts published after 2015-01-03: 380 | 381 | .. code-block:: bash 382 | 383 | http GET http://localhost:6543/api/posts?filter[published_at:gt]=2015-01-03 384 | 385 | Find all the posts with 'bob' somewhere in the title: 386 | 387 | .. code-block:: bash 388 | 389 | http GET http://localhost:6543/api/posts?filter[title:like]=*bob* 390 | 391 | Find all the posts where the author has the name 'alice': 392 | 393 | .. code-block:: bash 394 | 395 | http GET http://localhost:6543/api/posts?filter[author.name:eq]=alice 396 | 397 | **RQL** 398 | 399 | Find all the people with name 'alice': 400 | 401 | .. code-block:: bash 402 | 403 | http :6543/api/people filter[*rql]='eq(name,alice)' 404 | 405 | Find all the posts where the author has the name 'alice': 406 | 407 | .. code-block:: bash 408 | 409 | http :6543/api/posts filter[*rql]='eq((author,name),alice)' 410 | -------------------------------------------------------------------------------- /docs/source/developing.rst: -------------------------------------------------------------------------------- 1 | .. _developing: 2 | 3 | Developing pyramid-jsonapi 4 | ========================== 5 | 6 | Development 7 | ----------- 8 | 9 | This project is set up to use `tox` to create a suitable testing environment. 10 | You must first install `tox` - either system-wide, or in it's own virtualenv: 11 | 12 | Set up the `tox` environment as follows: 13 | 14 | 15 | .. code-block:: bash 16 | 17 | # To install tox 18 | python3 -mvenv toxenv 19 | toxenv/bin/pip install tox 20 | 21 | # To run tox and test the project: 22 | toxenv/bin/tox 23 | 24 | *Note*: The `toxenv` virtualenv only exists to deliver `tox`, *NOT* for development. 25 | 26 | `tox` creates it's own virtualenvs for testing in `.tox/` which can be used for code testing and development. 27 | These contain all of the dependencies for both the project and testing, as well as the local `pyramid-jsonapi` 28 | 29 | You can use these in the usual way for your own testing and development, e.g.: 30 | 31 | .. code-block:: bash 32 | 33 | source .tox/py3/bin/activate 34 | 35 | 36 | Contribution 37 | ------------- 38 | 39 | All contributions are welcome! You can contribute by making *pull requests* to 40 | the git repo: ``_ 41 | 42 | Travis (``_) is run against 43 | all PRs and commits to ensure consistent, high-quality code. 44 | 45 | Tests 46 | ^^^^^^ 47 | 48 | ``unittest`` tests should be created for all new code. Coverage can be reviewed at: 49 | ``_ 50 | 51 | PEP8 52 | ^^^^ 53 | 54 | Code should pass PEP8 validation: 55 | 56 | * long lines should be avoided, but not at the expense of readability. (``pycodestyle --ignore=E501`` is used when testing). 57 | 58 | pylint 59 | ^^^^^^ 60 | 61 | Code should pass pylint validation: 62 | 63 | * ``# pylint: disable=xxx`` is allowed where there is a clear reason for doing so. Please document as necessary. 64 | 65 | Idiomatic Python 66 | ^^^^^^^^^^^^^^^^ 67 | Is to be preferred wherever possible. 68 | 69 | Python Versions 70 | ^^^^^^^^^^^^^^^^ 71 | Currently pyramid_jsonapi is built and tested against python 3. 3.4 or later is recommended. 72 | 73 | Versioning 74 | ^^^^^^^^^^^ 75 | Semantic versioning should be used, see 76 | `PEP440 - Version Identification and Dependency Specification `_ 77 | for details. 78 | 79 | 80 | Documentation 81 | ------------- 82 | 83 | Documentation is built using sphinx. 
This is done automatically using Travis for 84 | certain builds (e.g. tagged releases) and pushed to the *gh-pages* branch. 85 | 86 | Note that the documentation uses the ``sphinx-rtd-theme`` 87 | 88 | To manually build the documentation: 89 | 90 | .. code-block:: bash 91 | 92 | docs/sphinx.sh 93 | 94 | Documentation will be written to `docs/build/` (in .gitignore). 95 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | `pyramid-jsonapi` Documentation 2 | ******************************** 3 | 4 | pyramid-jsonapi aims to create a JSON-API (``_) standard API 5 | from a database using the sqlAlchemy ORM and pyramid framework. 6 | 7 | Contents 8 | ======== 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | 13 | welcome 14 | api-creation 15 | client 16 | customisation 17 | metadata 18 | developing 19 | apidoc/pyramid_jsonapi 20 | 21 | 22 | .. include:: readme.inc 23 | 24 | Indices and tables 25 | ================== 26 | 27 | * :ref:`genindex` 28 | * :ref:`modindex` 29 | * :ref:`search` 30 | -------------------------------------------------------------------------------- /docs/source/metadata.rst: -------------------------------------------------------------------------------- 1 | .. _metadata: 2 | 3 | Metadata Modules 4 | ================ 5 | 6 | Metadata modules provide access to metadata about the API. The 7 | :class:`pyramid_jsonapi.metadata` class is responsible for loading the modules 8 | and setting up routes and views under ``/metadata`` 9 | (by default - see `settings`). 10 | 11 | Built-in Modules 12 | ---------------- 13 | 14 | The ``metadata_modules`` configuration option lists modules which are to be loaded 15 | (see `settings`). If this list is empty, no modules 16 | will be loaded. 17 | 18 | *Note*: Some modules are required for core functionality - for example schema 19 | validation requires the ``jsonschema`` module. 20 | 21 | The default setting for this option includes the following modules: 22 | 23 | .. toctree:: 24 | :glob: 25 | 26 | apidoc/pyramid_jsonapi.metadata.* 27 | 28 | Custom Modules 29 | -------------- 30 | 31 | As well as the built-in modules it is possible to write new metadata modules and add them to 32 | the ``metadata_modules`` list (space or newline separated). 33 | 34 | **Note** - if modifying the ``metadata_modules`` list, you will need to explicitly 35 | include any of the ones provided with the project you wish to keep active. 36 | 37 | Requirements 38 | ^^^^^^^^^^^^ 39 | 40 | Any modules must follow these rules in order to work properly: 41 | 42 | * The module MUST contain a class with the same name as the package. 43 | * The class MUST expect to be passed a reference to the :class:`pyramid_jsonapi.JSONAPI` instance as the first argument. 44 | * The class MAY contain a ``views`` attribute, which contains a list of :class:`pyramid_jsonapi.metadata.VIEWS` namedtuple instances. These are mapped onto a 45 | :func:`pyramid.config.add_view` call. 46 | (Views are optional - methods may exist in modules to be called 'internally by other methods/modules, rather than being web-facing). 47 | 48 | 49 | For example, to add a custom metadata module called ``Foo``, you need to do the following: 50 | 51 | 1. Create a ``Foo`` package, and ensure it is available to be imported in the python environment. 52 | 53 | 2. In ``Foo/__init__.py`` add the following: 54 | 55 | 56 | .. 
code-block:: python 57 | 58 | class Foo(): 59 | 60 | def __init__(self, api): 61 | 62 | self.views = [ 63 | pyramid_jsonapi.metadata.VIEWS( 64 | attr='generate_dict', # The method to associate with the view 65 | route_name='', # The relative route name to attach this method to (defaults to /metadata/Foo) 66 | request_method='', # The http request method (defaults to GET) 67 | renderer='' # The pyramid renderer (defaults to json) 68 | ), 69 | pyramid_jsonapi.metadata.VIEWS( 70 | attr='generate_string', 71 | route_name='resource/{endpoint}', 72 | request_method='GET', 73 | renderer='string' 74 | ), 75 | ] 76 | 77 | def generate_dict(self, request): 78 | return {'foo': 'bar'} 79 | 80 | def generate_string(self, request): 81 | return "foo: {}".format(request.matchdict['endpoint']) 82 | 83 | 84 | Note the use of the `Route pattern syntax `_ 85 | in the second example would result in ``generate_string()`` being called for route ``/metadata/Foo/resource/baz`` with endpoint set to ``baz``. 86 | -------------------------------------------------------------------------------- /docs/source/readme.inc: -------------------------------------------------------------------------------- 1 | .. include:: ../../README.rst 2 | -------------------------------------------------------------------------------- /docs/source/welcome.rst: -------------------------------------------------------------------------------- 1 | ../../README.rst -------------------------------------------------------------------------------- /docs/sphinx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Try to ensure we're in the project 'root' for relative paths etc to work. 4 | cd "$(dirname $0)/.." 5 | 6 | PATH=bin/:$PATH 7 | SOURCE=${SOURCE:-docs/source} 8 | TARGET=${TARGET:-docs/build} 9 | sphinx-apidoc -f -T -e -o ${SOURCE}/apidoc pyramid_jsonapi 10 | # apidoc and dataclasses seem to both want create attribute docstrings which 11 | # clash for a space in the index. 12 | echo " :noindex:" >> ${SOURCE}/apidoc/pyramid_jsonapi.permissions.rst 13 | # Generate config docs from python method 14 | python -c 'import pyramid_jsonapi; import pyramid_settings_wrapper as psw; s = psw.Settings({}, defaults=pyramid_jsonapi.PyramidJSONAPI.config_defaults); print(s.sphinx_doc())' >docs/source/apidoc/settings.inc 15 | travis-sphinx --outdir=${TARGET} build --source=${SOURCE} 16 | # Build docs if this is master branch, and HEAD has a tag associated with it 17 | if [[ -n $TRAVIS_TAG ]]; then 18 | # Get a pylint badge 19 | wget --tries=3 --timeout=20 https://mperlet.github.io/pybadge/badges/$(pylint pyramid_jsonapi |grep "rated at" |awk '{print $7}' |cut -f 1 -d '/').svg -O ${TARGET}/pylint-badge.svg 20 | echo "Deploying docs to gh-pages..." 
21 | travis-sphinx --outdir=${TARGET} deploy 22 | fi 23 | -------------------------------------------------------------------------------- /pyramid_jsonapi/authoriser.py: -------------------------------------------------------------------------------- 1 | from cachetools import cached 2 | from cachetools.keys import hashkey 3 | from dataclasses import dataclass 4 | from functools import partial 5 | from pyramid_jsonapi.permissions import Targets, PermissionTarget 6 | from pyramid_jsonapi.collection_view import CollectionViewBase 7 | 8 | 9 | @dataclass 10 | class Authoriser: 11 | view: CollectionViewBase 12 | 13 | def iterate_authorised_items(self, it, errors): 14 | return filter(partial(self.authorise_item, errors=errors), it) 15 | 16 | def authorise_item(self, item, errors): 17 | if item is None: 18 | return True 19 | perms = self.item_permissions(item) 20 | if not perms.id and errors is not None: 21 | view = self.view.view_instance(item.__class__) 22 | ref = f'{view.collection_name}::{view.item_id(item)}' 23 | errors['objects'][ref] = 'GET id denied' 24 | return False 25 | return True 26 | 27 | def authorised_item(self, item, errors): 28 | if self.authorise_item(item, errors): 29 | return item 30 | return None 31 | 32 | def item_permissions_key(self, item): 33 | view = self.view.view_instance(item.__class__) 34 | return ( 35 | view.collection_name, 36 | str(getattr(item, view.key_column.name)) 37 | ) 38 | 39 | @cached(cache={}, key=item_permissions_key) 40 | def item_permissions(self, item): 41 | view = self.view.view_instance(item.__class__) 42 | pf = view.permission_filter('get', Targets.item, 'alter_result') 43 | return pf(item, PermissionTarget(Targets.item)) 44 | -------------------------------------------------------------------------------- /pyramid_jsonapi/callbacks_doc.py: -------------------------------------------------------------------------------- 1 | # pylint: skip-file 2 | 3 | 4 | def after_serialise_object(view_instance, obj): 5 | """Called after a resource object is serialised, before it is returned. 6 | 7 | Use this callback to alter objects as they are serialised: perhaps merging 8 | information from other data sources, perhaps removing restricted information 9 | or denying access (raise an appropriate exception). 10 | 11 | Args: 12 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 13 | instance. 14 | 15 | obj (dict): serialised object. 16 | 17 | Returns: 18 | dict: serialised resource object. 19 | """ 20 | 21 | 22 | def after_serialise_identifier(view_instance, identifier): 23 | """Called after a resource identifier is serialised, before it is returned. 24 | 25 | Args: 26 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 27 | instance. 28 | 29 | identifier (dict): serialised identifier. 30 | 31 | Returns: 32 | dict: serialised resource identifier. 33 | """ 34 | 35 | 36 | def after_get(view_instance, document): 37 | """Called just before view_instance.get() returns. 38 | 39 | Args: 40 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 41 | instance. 42 | 43 | document (dict): JSON-API top level document. 44 | 45 | Returns: 46 | dict: altered JSON-API top level document. 47 | """ 48 | 49 | 50 | def before_patch(view_instance, partial_object): 51 | """Called before a patch is applied. 52 | 53 | Args: 54 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 55 | instance. 56 | 57 | partial_object (dict): JSON-API patch object. 58 | 59 | Returns: 60 | dict: altered patch object. 
61 | """ 62 | 63 | 64 | def before_delete(view_instance, db_item): 65 | """Called before an object is deleted. 66 | 67 | Args: 68 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 69 | instance. 70 | 71 | db_item: item returned by sqlalchemy query. 72 | """ 73 | 74 | 75 | def after_collection_get(view_instance, document): 76 | """Called just before view_instance.collection_get() returns. 77 | 78 | Args: 79 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 80 | instance. 81 | 82 | document (dict): JSON-API top level document. 83 | 84 | Returns: 85 | dict: altered JSON-API top level document. 86 | """ 87 | 88 | 89 | def before_collection_post(view_instance, obj): 90 | """Called before enacting view_instance.collection_post(). 91 | 92 | Args: 93 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 94 | instance. 95 | 96 | obj (dict): JSON-API object to be created. 97 | """ 98 | 99 | 100 | def after_related_get(view_instance, document): 101 | """Called before view_instance.related_get() returns. 102 | 103 | Args: 104 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 105 | instance. 106 | 107 | document (dict): JSON-API top level document. 108 | 109 | Returns: 110 | dict: altered JSON-API top level document. 111 | """ 112 | 113 | 114 | def after_relationships_get(view_instance, document): 115 | """Called before view_instance.relationships_get() returns. 116 | 117 | Args: 118 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 119 | instance. 120 | 121 | document (dict): JSON-API top level document. 122 | 123 | Returns: 124 | dict: altered JSON-API top level document. 125 | """ 126 | 127 | 128 | def before_relationships_post(view_instance, data): 129 | """Called before enacting view_instance.relationships_post(). 130 | 131 | Args: 132 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 133 | instance. 134 | 135 | data (dict or list): resource identifier or a list of them. 136 | """ 137 | 138 | 139 | def before_relationships_patch(view_instance, data): 140 | """Called before enacting view_instance.relationships_patch(). 141 | 142 | Args: 143 | view_instance (pyramid_jsonapi.CollectionViewBase): the current view 144 | instance. 145 | 146 | data (dict or list): resource identifier or a list of them. 
147 | """ 148 | 149 | 150 | def before_relationships_delete(view_instance, parent_db_item): 151 | """ 152 | """ 153 | -------------------------------------------------------------------------------- /pyramid_jsonapi/db_query.py: -------------------------------------------------------------------------------- 1 | from pyramid.httpexceptions import HTTPBadRequest 2 | from pyramid_jsonapi.http_query import QueryInfo 3 | from rqlalchemy import RQLQueryMixIn 4 | from sqlalchemy.orm import load_only, Query as BaseQuery 5 | 6 | 7 | class PJQueryMixin: 8 | 9 | @staticmethod 10 | def from_view(view, **kwargs): 11 | query = view.dbsession.query( 12 | view.model 13 | ) 14 | query.__class__ = RQLQuery 15 | query.pj_view = view 16 | return query.pj_options(**kwargs) 17 | 18 | def pj_options(self, **kwargs): 19 | query = self 20 | for key, val in kwargs.items(): 21 | query = getattr(query, f'_opt_{key}')(val) 22 | return query 23 | 24 | def _opt_loadonly(self, loadonly): 25 | if not loadonly: 26 | loadonly = self.pj_view.allowed_requested_query_columns.keys() 27 | return self.options(load_only(*loadonly)) 28 | 29 | def pj_count(self): 30 | return self.count() 31 | 32 | def add_filtering(self): 33 | return self.pj_view.query_add_filtering(self) 34 | 35 | def add_relative_paging(self): 36 | query = self 37 | view = self.pj_view 38 | qinfo = QueryInfo(view.__class__, view.request) 39 | pinfo = qinfo.paging_info 40 | 41 | # We just add filters here. The necessary joins will have been done by the 42 | # Sorting that after relies on. 43 | # Need >= or <= on all but the last prop. 44 | if pinfo.start_type.endswith('_id'): 45 | before_after = self.before_after_from_id(qinfo, pinfo.item_id) 46 | else: 47 | before_after = pinfo.before_after 48 | for sinfo, after in zip(qinfo.sorting_info[:-1], before_after[:-1]): 49 | ascending = not sinfo.ascending if query._pj_reversed else sinfo.ascending 50 | if ascending: 51 | query = query.filter(sinfo.prop >= after) 52 | else: 53 | query = query.filter(sinfo.prop <= after) 54 | # And > or < on the last one. 
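        # A strict comparison on the final sort property keeps the
        # before/after anchor item itself out of the returned page.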
55 | ascending = qinfo.sorting_info[-1].ascending 56 | ascending = not ascending if query._pj_reversed else ascending 57 | # first and last have empty before_afters 58 | if before_after: 59 | if ascending: 60 | query = query.filter(qinfo.sorting_info[-1].prop > before_after[-1]) 61 | else: 62 | query = query.filter(qinfo.sorting_info[-1].prop < before_after[-1]) 63 | 64 | return query 65 | 66 | def before_after_from_id(self, qinfo, item_id): 67 | item = self.pj_view.get_item(item_id) 68 | if not item: 69 | raise HTTPBadRequest(f'Could not find item with after_id {item_id}') 70 | vals = [self.get_prop_value(item, info) for info in qinfo.sorting_info] 71 | return vals 72 | 73 | def get_prop_value(self, item, prop_info): 74 | val = item 75 | for key in prop_info.colspec: 76 | val = getattr(val, key) 77 | return val 78 | 79 | def id_only(self): 80 | return self.options(load_only(self.pj_view.key_column.name)) 81 | 82 | def iterate_paged(self, page_size=None): 83 | page_size = page_size or self.pj_view.query_info.paging_info.limit 84 | cur_query = self.limit(page_size) 85 | records_yielded = 0 86 | records_from_cur = 0 87 | while True: 88 | # Loop through records in a page: 89 | for record in cur_query: 90 | records_yielded += 1 91 | records_from_cur += 1 92 | yield record 93 | # End of a page 94 | if records_from_cur < page_size: 95 | break 96 | records_from_cur = 0 97 | cur_query = self.offset(records_yielded).limit(page_size) 98 | 99 | 100 | class RQLQuery(BaseQuery, RQLQueryMixIn, PJQueryMixin): 101 | 102 | def _rql_ilike(self, args): 103 | attr, value = args 104 | 105 | attr = self._rql_attr(attr) 106 | value = self._rql_value(value, attr) 107 | value = value.replace("*", "%") 108 | 109 | return attr.ilike(value) 110 | 111 | def _rql_icontains(self, args): 112 | attr, value = args 113 | attr = self._rql_attr(attr) 114 | value = self._rql_value(value, attr) 115 | return attr.ilike(f'%{value}%') 116 | 117 | def _rql_isempty(self, args): 118 | attr = self._rql_attr(args[0]) 119 | return attr.__eq__('') 120 | 121 | def _rql_isnull(self, args): 122 | attr = self._rql_attr(args[0]) 123 | # None value translates to 'IS NULL' 124 | return attr.__eq__(None) 125 | 126 | def _rql_isnotempty(self, args): 127 | attr = self._rql_attr(args[0]) 128 | return attr.__ne__('') 129 | 130 | def _rql_isnotnull(self, args): 131 | attr = self._rql_attr(args[0]) 132 | # None value translates to 'IS NOT NULL' 133 | return attr.__ne__(None) 134 | 135 | def _rql_istrue(self, args): 136 | attr = self._rql_attr(args[0]) 137 | return attr.__eq__(True) 138 | 139 | def _rql_isfalse(self, args): 140 | attr = self._rql_attr(args[0]) 141 | return attr.__eq__(False) 142 | -------------------------------------------------------------------------------- /pyramid_jsonapi/filters.py: -------------------------------------------------------------------------------- 1 | """Classes and methods for handling filter operators.""" 2 | 3 | import re 4 | from sqlalchemy.dialects.postgresql import JSONB 5 | 6 | 7 | class FilterRegistry: 8 | """Registry of allowed filter operators. 9 | 10 | Attributes: 11 | data (dict): data store for filter op information. 
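
    Example (an illustrative sketch using the methods defined below; the
    filter name is made up):

        registry = FilterRegistry()
        # Register a case-insensitive equality operator built on 'ilike'.
        registry.register('ilike', filter_name='ieq')
        registry.get_filter(str, 'ieq')  # served from the '__ALL__' section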
12 | """ 13 | 14 | def __init__(self): 15 | self.data = {} 16 | self.register_standard_filters() 17 | 18 | def register_standard_filters(self): 19 | """Register standard supported filter operators.""" 20 | for comparator_name in ( 21 | '__eq__', 22 | '__ne__', 23 | 'startswith', 24 | 'endswith', 25 | 'contains', 26 | '__lt__', 27 | '__gt__', 28 | '__le__', 29 | '__ge__' 30 | ): 31 | self.register(comparator_name) 32 | # Transform '%' to '*' for like and ilike 33 | for comparator_name in ( 34 | 'like', 35 | 'ilike' 36 | ): 37 | self.register( 38 | comparator_name, 39 | value_transform=lambda val: re.sub(r'\*', '%', val) 40 | ) 41 | # JSONB specific operators 42 | for comparator_name in ( 43 | 'contains', 44 | 'contained_by', 45 | 'has_all', 46 | 'has_any', 47 | 'has_key' 48 | ): 49 | self.register( 50 | comparator_name, 51 | column_type=JSONB 52 | ) 53 | 54 | def register( 55 | self, 56 | comparator_name, 57 | filter_name=None, 58 | value_transform=lambda val: val, 59 | column_type='__ALL__' 60 | ): 61 | """ Register a new filter operator. 62 | 63 | Args: 64 | comparator_name (str): name of sqlalchemy comparator method. 65 | filter_name(str): name of filter param in URL. Defaults to 66 | comparator_name with any occurrences of '__' removed (so '__eq__' 67 | defaults to 'eq', for example). 68 | value_transform (func): function taking the filter value as the only 69 | argument and returning a transformed value. Defaults to a 70 | function returning an unmodified value. 71 | column_type (class): type (class object, not name) for which this 72 | operator is to be registered. Defaults to '__ALL__' (the string) 73 | which makes the operator valid for all column types. 74 | """ 75 | try: 76 | registry = self.data[column_type] 77 | except KeyError: 78 | registry = self.data[column_type] = {} 79 | registry[filter_name or comparator_name.replace('__', '')] = { 80 | 'comparator_name': comparator_name, 81 | 'value_transform': value_transform 82 | } 83 | 84 | def get_filter(self, column_type, filter_name): 85 | """Get dictionary of filter information. 86 | 87 | Args: 88 | column_type (class): type (class object, not name) of a Column. 89 | filter_name(str): name of filter param in URL. 90 | 91 | Returns: 92 | dict: information dictionary for filter. Type specific entry if it 93 | exists, entry from '__ALL__' if it does not. 94 | 95 | Raises: 96 | KeyError: if filter_name is not in the type specific or ALL sections. 
97 | """ 98 | try: 99 | return self.data[column_type][filter_name] 100 | except KeyError: 101 | return self.data['__ALL__'][filter_name] 102 | 103 | def valid_filter_names(self, column_types=None): 104 | """Return set of supported filter operator names.""" 105 | ops = set() 106 | column_types = set(column_types or {k for k in self.data}) 107 | column_types.add('__ALL__') 108 | for ctype in column_types: 109 | ops |= self.data[ctype].keys() 110 | return ops 111 | -------------------------------------------------------------------------------- /pyramid_jsonapi/http_query.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from collections.abc import Iterable 4 | from dataclasses import dataclass, field 5 | from functools import cached_property, cache 6 | from pyramid.request import Request 7 | from pyramid.settings import asbool 8 | from pyramid.httpexceptions import HTTPBadRequest 9 | 10 | 11 | class ColspecMixin: 12 | 13 | @cached_property 14 | def colspec(self): 15 | return self._colspec_value.split(':')[0].split('.') 16 | 17 | @cached_property 18 | def rels(self): 19 | rels = [] 20 | vc = self.view_class 21 | for rname in self.colspec[:-1]: 22 | try: 23 | rel = vc.relationships[rname] 24 | except KeyError: 25 | raise HTTPBadRequest(f"{vc.collection_name} has no relationship {rname}") 26 | rels.append(rel) 27 | vc = self.view_class.api.view_classes[rel.tgt_class] 28 | return rels 29 | 30 | @cached_property 31 | def prop(self): 32 | if self.rels: 33 | vc = self.view_class.api.view_classes[self.rels[-1].tgt_class] 34 | else: 35 | vc = self.view_class 36 | try: 37 | return getattr(vc.model, self.colspec[-1]) 38 | except AttributeError: 39 | raise HTTPBadRequest(f"Collection '{vc.collection_name}' has no attribute '{self.colspec[-1]}'") 40 | 41 | 42 | @dataclass 43 | class QueryInfo: 44 | view_class: 'CollectionViewBase' 45 | request: Request 46 | 47 | @cached_property 48 | def filter_info(self): 49 | return tuple( 50 | FilterInfo(self.view_class, pname, pval) 51 | for pname, pval in self.request.params.items() 52 | if pname.startswith('filter[') 53 | ) 54 | 55 | @cached_property 56 | def sorting_info(self): 57 | return tuple( 58 | SortingInfo(self.view_class, value) 59 | for value in self.request.params.get('sort', self.view_class.key_column.name).split(',') 60 | ) 61 | 62 | @cached_property 63 | def paging_info(self): 64 | return PagingInfo(self.view_class, self.request, self.sorting_info) 65 | 66 | @cache 67 | def rel_paging_info(self, rel_path): 68 | return PagingInfo(self.view_class, self.request, self.sorting_info, rel_path) 69 | 70 | @cached_property 71 | def pj_include_count(self): 72 | return asbool( 73 | self.request.params.get('pj_include_count', 'false') 74 | ) 75 | 76 | @cached_property 77 | def field_info(self): 78 | return tuple( 79 | FieldInfo(key, val) for key, val in self.request.params.items() 80 | if key.startswith('fields[') 81 | ) 82 | 83 | 84 | @dataclass 85 | class FieldInfo: 86 | key: str 87 | val: str 88 | 89 | @cached_property 90 | def collection_name(self): 91 | # Remove "fields[" from the start and "]" from the end. 
92 |         return self.key[7:-1]
93 | 
94 |     @cached_property
95 |     def field_names(self):
96 |         return self.val.split(',')
97 | 
98 | 
99 | @dataclass
100 | class FilterInfo(ColspecMixin):
101 |     view_class: 'CollectionViewBase'
102 |     pname: str
103 |     value: str
104 | 
105 |     @cached_property
106 |     def _colspec_value(self):
107 |         return self.filter_key
108 | 
109 |     @cached_property
110 |     def filter_key(self):
111 |         # Remove "filter[" from the start and "]" from the end of the param name.
112 |         return self.pname[7:-1]
113 | 
114 |     @cached_property
115 |     def filter_type(self):
116 |         if self.filter_key.startswith('*'):
117 |             return self.filter_key[1:]
118 |         else:
119 |             return 'native'
120 | 
121 |     @cached_property
122 |     def op(self):
123 |         try:
124 |             _, op = self.filter_key.split(':')
125 |         except ValueError:
126 |             return 'eq'
127 |         return op
128 | 
129 | 
130 | @dataclass
131 | class SortingInfo(ColspecMixin):
132 |     view_class: 'CollectionViewBase'
133 |     value: str
134 |     ascending: bool = field(init=False)
135 | 
136 |     def __post_init__(self):
137 |         if self.value.startswith('-'):
138 |             self.ascending = False
139 |             self.value = self.value[1:]
140 |         else:
141 |             self.ascending = True
142 | 
143 |     def __str__(self) -> str:
144 |         if self.ascending:
145 |             return self.value
146 |         else:
147 |             return f'-{self.value}'
148 | 
149 |     @cached_property
150 |     def colspec(self):
151 |         return tuple(
152 |             self.view_class.key_column.name if cname == 'id' else cname
153 |             for cname in self.value.split('.')
154 |         )
155 | 
156 | 
157 | @dataclass
158 | class PagingInfo:
159 |     view_class: 'CollectionViewBase'
160 |     request: Request
161 |     sorting_info: Iterable[SortingInfo] = tuple()
162 |     rel_path: str = None
163 |     start_type: str = field(init=False, default=None)
164 |     limit: int = field(init=False)
165 |     prefix: str = ''
166 | 
167 |     def __post_init__(self):
168 |         # We need params a lot so shorten some lines:
169 |         if self.rel_path:
170 |             self.prefix = f'{self.rel_path}:'
171 |         else:
172 |             self.prefix = ''
173 |         prefix = self.prefix
174 |         params = self.request.params
175 |         self.limit = min(
176 |             self.view_class.max_limit,
177 |             int(params.get(f'page[{prefix}limit]', self.view_class.default_limit))
178 |         )
179 |         if self.limit < 0:
180 |             raise HTTPBadRequest(f'page[{prefix}limit] must not be negative.')
181 | 
182 |         possible_start_types = (
183 |             'before', 'after', 'before_id', 'after_id',
184 |             'first', 'last',
185 |             'offset'
186 |         )
187 |         start_types_found = [st for st in possible_start_types if f'page[{prefix}{st}]' in params]
188 |         if len(start_types_found) > 1:
189 |             raise HTTPBadRequest(
190 |                 f'You cannot provide multiple start types: {[f"page[{prefix}{st}]" for st in start_types_found]}'
191 |             )
192 |         if len(start_types_found) == 1:
193 |             self.start_type = start_types_found[0]
194 |         self.start_type = self.start_type or 'first'
195 | 
196 |     @cached_property
197 |     def start_arg(self):
198 |         return self.request.params.get(f'page[{self.prefix}{self.start_type}]')
199 | 
200 |     @cached_property
201 |     def before_after(self):
202 |         if self.is_terminal:
203 |             return []
204 |         args = self.start_arg.split(',')
205 |         if len(args) != len(self.sorting_info):
206 |             raise HTTPBadRequest(f'page[{self.prefix}{self.start_type}] list must match sort column list.')
207 |         return args
208 | 
209 |     @cached_property
210 |     def before(self):
211 |         return self.before_after
212 | 
213 |     @cached_property
214 |     def after(self):
215 |         return self.before_after
216 | 
217 |     @cached_property
218 |     def item_id(self):
219 |         return self.start_arg
220 | 
221 | 
@cached_property 222 | def offset(self): 223 | offset = int(self.request.params.get('page[offset]', 0)) 224 | if offset < 0: 225 | raise HTTPBadRequest('page[offset] must not be negative.') 226 | return offset 227 | 228 | @cached_property 229 | def is_relative(self): 230 | return self.start_type in {'before', 'after', 'before_id', 'after_id', 'first', 'last'} 231 | 232 | @cached_property 233 | def is_terminal(self): 234 | return self.start_type in {'first', 'last'} 235 | 236 | @cached_property 237 | def needs_reversed(self): 238 | if self.start_type in {'before', 'before_id', 'last'}: 239 | return True 240 | return False 241 | 242 | @cached_property 243 | def page_start(self): 244 | return getattr(self, self.start_type) 245 | 246 | 247 | def includes(request): 248 | incs = request.params.get('include') 249 | if not incs: 250 | return [] 251 | return incs.split(',') 252 | 253 | 254 | def include_chain(include): 255 | chain = [] 256 | names = include.split('.') 257 | for i in range(len(names)): 258 | chain.append(tuple(names[:i + 1])) 259 | return chain 260 | 261 | 262 | def longest_includes(includes): 263 | seen = set() 264 | longest = set() 265 | for inc in includes: 266 | inc_chain = include_chain(inc) 267 | if inc_chain[-1] in seen: 268 | continue 269 | seen |= set(inc_chain) 270 | longest -= set(inc_chain[:-1]) 271 | longest.add(inc_chain[-1]) 272 | return longest 273 | -------------------------------------------------------------------------------- /pyramid_jsonapi/metadata/OpenAPI/__init__.py: -------------------------------------------------------------------------------- 1 | """Generate OpenAPI documentation from Models, Schema and Endpoint info. 2 | 3 | This module provides 2 ``metadata`` views: 4 | 5 | * ``OpenAPI`` - the Swagger UI. 6 | * ``OpenAPI/specification`` - the project OpenAPI specification (JSON). 7 | 8 | Configuration 9 | ------------- 10 | 11 | The config option ``openapi_file`` can be used to provide a JSON or YAML 12 | file which will be used to update the dynamically generated documentation. 13 | 14 | Metadata for the OpenAPI documentation will be extracted from the ``PKG-INFO`` 15 | data provided with the pyramid package that is using ``pyramid_jsonapi``, 16 | using the ``pkginfo`` module. This requires that the pyramid package can be 17 | located in the python path (i.e it can be imported). 18 | 19 | Documentation is dynamically generated from several sources: 20 | 21 | * Endpoint data -> endpoints, parameters, request and response content. 22 | * Model docstrings -> endpoint descriptions. 23 | * sqlalchemy columns -> schemas (via JSONSchema module). 
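
For example, the ``openapi_file`` update described above might be wired into
the application like this (an illustrative sketch; the settings key spelling
is an assumption about the application's ``.ini`` layout)::

    [app:main]
    pyramid_jsonapi.openapi_file = %(here)s/openapi-overrides.yaml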
24 | 25 | """ 26 | 27 | import functools 28 | import os.path 29 | import yaml 30 | import pkginfo 31 | from pyramid.renderers import JSON 32 | 33 | from pyramid_jsonapi.metadata import VIEWS 34 | 35 | 36 | class OpenAPI(): 37 | """Auto-generate OpenAPI documentation.""" 38 | 39 | def __init__(self, api): 40 | self.api = api 41 | self.metadata = {} 42 | # Load mako templating 43 | self.api.config.include('pyramid_mako') 44 | self.api.config.add_renderer('json_sorted', JSON(sort_keys=True)) 45 | 46 | self.views = [ 47 | VIEWS( 48 | attr='openapi_spec', 49 | route_name='specification', 50 | request_method='', 51 | renderer='json_sorted' 52 | ), 53 | VIEWS( 54 | attr='swagger_ui', 55 | route_name='', 56 | request_method='', 57 | renderer='pyramid_jsonapi.metadata.OpenAPI:swagger-ui/index.mako' 58 | ) 59 | ] 60 | 61 | @staticmethod 62 | def swagger_ui(request): 63 | """Dynamically generate the swagger-ui index.html 64 | 65 | Parameters: 66 | request (optional): Pyramid Request object. 67 | 68 | Returns: 69 | dict containing variables for template substitution. 70 | """ 71 | 72 | return {'openapi_url': "{}/specification".format(request.current_route_url())} 73 | 74 | def openapi_spec(self, request=None): # pylint:disable=unused-argument 75 | """Return the OpenAPI specification dict (as a pyramid view). 76 | 77 | Parameters: 78 | request (optional): Pyramid Request object. 79 | 80 | Returns: 81 | OpenAPI template document. 82 | """ 83 | 84 | return self.generate_openapi(request=request) 85 | 86 | @functools.lru_cache() 87 | def generate_pkg_metadata(self): 88 | """Get metadatsa for 'parent' pyramid package.""" 89 | # Get the PKG-INFO metadata for the 'parent' pyramid app 90 | pkg_name = self.api.config.package_name 91 | self.metadata = pkginfo.Installed(pkg_name) 92 | 93 | @staticmethod 94 | def build_content(schema, description='', mediatype='application/vnd.api+json'): 95 | """Construct a content dictionary for a given schema.""" 96 | 97 | return { 98 | 'description': description, 99 | 'content': { 100 | mediatype: { 101 | 'schema': schema 102 | } 103 | } 104 | } 105 | 106 | def build_parameters(self, opts): 107 | """Build paramaters schema.""" 108 | 109 | # Add 'in: query' parameters - 'global' and ep-specific 110 | parameters = [] 111 | for param, val in self.api.endpoint_data.endpoints['query_parameters'].items(): 112 | schema = {} 113 | if isinstance(val, list): 114 | schema['type'] = 'array' 115 | schema['items'] = {'type': 'string', 116 | 'pattern': '|'.join(["^{}$".format(x) for x in val])} 117 | else: 118 | schema['type'] = 'string' 119 | q_param = { 120 | 'name': param, 121 | 'in': 'query', 122 | 'schema': schema, 123 | } 124 | parameters.append(q_param) 125 | 126 | # Add 'in: path' parameters extracted from route_pattern 127 | if 'route_pattern' in opts: 128 | for field in opts['route_pattern']['fields']: 129 | parameters.append({ 130 | 'name': field, 131 | 'in': 'path', 132 | 'required': True, 133 | 'schema': { 134 | 'type': 'string' 135 | } 136 | }) 137 | return parameters 138 | 139 | def build_request(self, name, method): 140 | """Build requestBody part of schema.""" 141 | 142 | content = self.api.metadata.JSONSchema.endpoint_schema( 143 | name, 144 | method.lower(), 145 | 'request', 146 | '999' # Code is irrelevant for requests 147 | ) 148 | 149 | return self.build_content(content) 150 | 151 | def build_responses(self, name, ep_type, method): 152 | """Build responses part of schema.""" 153 | responses = {} 154 | resp_data = dict() 155 | for resps in 
self.api.endpoint_data.find_all_keys('responses', ep_type, method): 156 | for http_class, opts in resps.items(): 157 | if http_class not in resp_data: 158 | resp_data[http_class] = opts['reason'] 159 | else: 160 | resp_data[http_class].extend(opts['reason']) 161 | for response, reason in resp_data.items(): 162 | responses[str(response.code)] = self.build_content( 163 | self.api.metadata.JSONSchema.endpoint_schema( 164 | name, 165 | method.lower(), 166 | 'response', 167 | response.code 168 | ), 169 | description="\n\n".join(set(reason)), 170 | ) 171 | return responses 172 | 173 | def recurse_remove_keys(self, dictionary, name): 174 | """Recursively build a new version of dictionary with named keys removed.""" 175 | new_dict = {} 176 | for key, value in dictionary.items(): 177 | if key != name: 178 | if isinstance(value, dict): 179 | new_dict[key] = self.recurse_remove_keys(value, name) 180 | else: 181 | new_dict[key] = value 182 | return new_dict 183 | 184 | def replace_in_value_inner(self, val, old, new): 185 | """Inner part of recurse_replace_in_value.""" 186 | if isinstance(val, (dict, list, tuple)): 187 | return self.recurse_replace_in_value(val, old, new) 188 | elif isinstance(val, str): 189 | return val.replace(old, new) 190 | return val 191 | 192 | def recurse_replace_in_value(self, obj, old, new): 193 | """Recursively replace() strings in values.""" 194 | if isinstance(obj, dict): 195 | new_obj = {} 196 | for key, val in obj.items(): 197 | new_obj[key] = self.replace_in_value_inner(val, old, new) 198 | elif isinstance(obj, (list, tuple)): 199 | new_obj = [] 200 | for val in obj: 201 | new_obj.append(self.replace_in_value_inner(val, old, new)) 202 | return new_obj 203 | 204 | @functools.lru_cache() 205 | def generate_openapi(self, request=None): 206 | """Generate openapi documentation.""" 207 | 208 | # OpenAPI 'template' 209 | openapi = { 210 | # OpenAPI specification version 211 | 'openapi': '3.0.0', 212 | 'paths': {}, 213 | } 214 | 215 | self.generate_pkg_metadata() 216 | 217 | openapi['info'] = { 218 | 'title': self.metadata.name or '', 219 | 'description': self.metadata.description or '', 220 | 'version': self.api.settings.api_version or self.metadata.version or '', 221 | 'contact': { 222 | 'name': self.metadata.author or '', 223 | 'email': self.metadata.author_email or '', 224 | 'url': self.metadata.home_page or request.host_url or '' 225 | }, 226 | 'license': { 227 | 'name': self.metadata.license or '' 228 | } 229 | } 230 | 231 | ep_data = self.api.endpoint_data 232 | 233 | # Split the route_path using the metadata_pattern. 234 | # any prefixes are then prefixed to path_name later 235 | # This handles hosting from a sub-directory. 236 | base_path = '' 237 | if request: 238 | base_path, _ = request.current_route_path().split( 239 | ep_data.rp_constructor.metadata_pattern('OpenAPI'), 240 | 1 241 | ) 242 | 243 | # Set server url to be relative url of base and api/version path 244 | openapi['servers'] = [ 245 | { 246 | 'url': "/{}".format(ep_data.rp_constructor.api_pattern('', base=base_path)) 247 | } 248 | ] 249 | 250 | paths = {} 251 | # Iterate through all view_classes, getting name (for path) 252 | for model, view_class in self.api.view_classes.items(): 253 | name = view_class.collection_name 254 | # Iterate through endpoints, adding paths and methods 255 | for ep_type, opts in ep_data.endpoints['endpoints'].items(): 256 | # Add appropriate suffix to path endpoint, if present (e.g. 
{id}) 257 | suffix = ep_data.route_pattern_to_suffix( 258 | opts.get('route_pattern', {}) 259 | ) 260 | # Create full path, stripping trailing slash if suffix was empty 261 | path_name = os.path.normpath("/{}/{}".format(name, suffix)) 262 | 263 | paths[path_name] = {} 264 | for method in opts['http_methods']: 265 | paths[path_name][method.lower()] = {} 266 | paths[path_name][method.lower()]['parameters'] = self.build_parameters(opts) 267 | if opts['http_methods'][method].get('request_schema', False): 268 | paths[path_name][method.lower()]['requestBody'] = self.build_request(name, method) 269 | if opts['http_methods'][method].get('response_schema', True): 270 | paths[path_name][method.lower()]['responses'] = self.build_responses(name, ep_type, method) 271 | 272 | # Add description 273 | paths[path_name][method.lower()]['description'] = model.__doc__ or '' 274 | 275 | # Add 'paths' to the openapi spec 276 | openapi['paths'].update(paths) 277 | 278 | # Add the JSONSchema JSONAPI definitions to the openapi spec 279 | openapi.update({'x-definitions': self.api.metadata.JSONSchema.template()['definitions']}) 280 | 281 | # Update openapi dict from external yaml/json file, if provided in config. 282 | openapi_file = str(self.api.settings.openapi_file) 283 | if openapi_file: 284 | with open(openapi_file) as oa_f: 285 | openapi.update(yaml.safe_load(oa_f.read())) 286 | 287 | # TODO: patternProperties not supported in openapi, so remove all occurrences 288 | # https://github.com/OAI/OpenAPI-Specification/issues/687 289 | openapi = self.recurse_remove_keys(openapi, 'patternProperties') 290 | 291 | # Re-map all definitions refs to x-definitions 292 | openapi = self.recurse_replace_in_value(openapi, 'definitions', 'x-definitions') 293 | 294 | return openapi 295 | -------------------------------------------------------------------------------- /pyramid_jsonapi/metadata/OpenAPI/swagger-ui/index.mako: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Swagger UI 7 | 8 | 9 | 10 | 11 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 |
69 | 70 | 71 | 72 | 93 | 94 | 95 | 96 | -------------------------------------------------------------------------------- /pyramid_jsonapi/metadata/OpenAPI/update-swagger-ui.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir -p swagger-ui 3 | # Script to download latest swagger-ui-dist index.html from github 4 | wget -qO- https://github.com/swagger-api/swagger-ui/archive/master.tar.gz | tar -xvz --strip-components 2 -C swagger-ui --wildcards */dist/index.html 5 | 6 | # CDN url 7 | CDN_URL=https://cdnjs.cloudflare.com/ajax/libs/swagger-ui/3.4.2/ 8 | 9 | # Convert html to template (adding CDN for JS libs) 10 | sed -e "s|\./|$CDN_URL|" -e 's/url: ".*"/url: "\${openapi_url}"/' swagger-ui/index.html >swagger-ui/index.mako 11 | 12 | -------------------------------------------------------------------------------- /pyramid_jsonapi/metadata/__init__.py: -------------------------------------------------------------------------------- 1 | """This package contains metadata 'plugin' modules 2 | that provide extra information related to the API being generated, 3 | such as documentation, schemas etc. 4 | 5 | Such plugins can optionally be offered as pyramid routes and views 6 | under the 'metadata' endpoint.""" 7 | 8 | import collections 9 | import importlib 10 | 11 | 12 | class MetaData(): 13 | """Adds routes and views for all metadata modules. 14 | 15 | Metadata modules are added by inclusion in the ``metadata_modules`` option 16 | in settings. 17 | Modules should be space or newline separated, and must be installed such 18 | that they can be imported by python. 19 | 20 | All modules MUST have a class with the same name as the package. 21 | This class MAY contain a 'views' attribute, which contains a list 22 | of 'VIEWS' namedtuple instances, which will be converted into pyramid 23 | routes and views. 24 | """ 25 | 26 | def __init__(self, api): 27 | self.api = api 28 | # aslist expects space-separated strings to convert to lists. 
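        # Illustrative settings value (the key spelling is an assumption about
        # the application's .ini layout):
        #     pyramid_jsonapi.metadata_modules = JSONSchema OpenAPI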
29 | # iter_modules returns a list of tuples - we only want name ([1]) 30 | self.modules = self.api.settings.metadata_modules.aslist() 31 | self.make_routes_views() 32 | 33 | def make_routes_views(self): 34 | """Generate routes and views for plugin modules.""" 35 | for mod_name in self.modules: 36 | # Import the module from the name provided 37 | module = importlib.import_module("{}.{}".format(__name__, mod_name)) 38 | # Each module should have a class with the same name 39 | class_name = mod_name 40 | mclass = getattr(module, class_name)(self.api) 41 | # Attach the instance as an attribute named after the class 42 | setattr(self, mod_name, mclass) 43 | views = getattr(mclass, 'views', []) 44 | for view in views: 45 | rp_constructor = self.api.endpoint_data.rp_constructor 46 | route_name = self.api.endpoint_data.make_route_name( 47 | class_name, 48 | suffix=view.route_name 49 | ) 50 | route_pattern = rp_constructor.metadata_pattern( 51 | class_name, 52 | view.route_name 53 | ) 54 | self.api.config.add_route( 55 | route_name, 56 | route_pattern 57 | ) 58 | self.api.config.add_view( 59 | mclass, 60 | attr=str(view.attr), 61 | route_name=route_name, 62 | request_method=view.request_method or 'GET', 63 | renderer=view.renderer or 'json', 64 | ) 65 | 66 | 67 | VIEWS = collections.namedtuple('Views', 'attr request_method route_name renderer') 68 | -------------------------------------------------------------------------------- /pyramid_jsonapi/permissions.py: -------------------------------------------------------------------------------- 1 | '''Implement classes representing permissions''' 2 | # Standard library imports. 3 | from enum import ( 4 | Enum, 5 | ) 6 | from dataclasses import ( 7 | dataclass, 8 | field, 9 | ) 10 | from functools import ( 11 | lru_cache, 12 | ) 13 | from typing import ( 14 | FrozenSet, 15 | Any, 16 | ) 17 | 18 | 19 | __all__ = ( 20 | 'Permission', 21 | 'PermissionTarget', 22 | 'Targets', 23 | 'TemplateMissmatch', 24 | ) 25 | 26 | 27 | class TemplateMissmatch(Exception): 28 | """ 29 | Signal a missmatch in templates. 30 | """ 31 | 32 | 33 | class PermissionDenied(Exception): 34 | """ 35 | Internal exception for permission denied. 36 | """ 37 | 38 | 39 | class Targets(Enum): 40 | """ 41 | Possible PermissionTarget types. 42 | """ 43 | collection = 1 44 | item = 2 45 | relationship = 3 46 | 47 | 48 | @dataclass(eq=True, frozen=True) 49 | class PermissionTarget: 50 | type: Targets 51 | name: str = None 52 | 53 | 54 | @dataclass(eq=True, frozen=True) 55 | class PermissionBase: 56 | """ 57 | Base class for Permission. We define a separate base class so that 58 | Permission can use it as the type of its template attribute. 59 | """ 60 | template: Any = None 61 | attributes: FrozenSet[str] = None 62 | relationships: FrozenSet[str] = None 63 | id: bool = None 64 | 65 | 66 | @dataclass(eq=True, frozen=True) 67 | class Permission(PermissionBase): 68 | """ 69 | Represent all possible permissions. 70 | 71 | Attributes: 72 | template: a Template object representing possible attributes and 73 | relationships. 74 | attributes: a frozenset of allowed attributes. 75 | relationships: a frozenset of allowed relationships. 76 | id: a boolean representing whether or not operations involving the 77 | resource identifier are allowed (like existence or viewing, adding 78 | and deleting from relationships). 
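
    Example (an illustrative sketch mirroring the unit tests; the attribute
    and relationship names are made up):

        template = Permission(None, frozenset({'name', 'age'}), frozenset({'posts'}), True)
        p1 = Permission(template, attributes={'name'}, relationships=set())
        p2 = Permission(template, attributes={'age'}, relationships=set())
        # Permissions that share a template combine with |, & and -.
        both = p1 | p2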
79 | """ 80 | template: PermissionBase = field(repr=False, default=None) 81 | 82 | @staticmethod 83 | def _caclulate_attr_val(attr, curval, template_val, id_): 84 | if curval is None: 85 | # defaults 86 | if id_: 87 | return template_val 88 | else: 89 | return frozenset() 90 | elif curval is True: 91 | return template_val 92 | elif curval is False: 93 | return frozenset() 94 | else: 95 | return curval 96 | 97 | def __post_init__(self): 98 | if self.id is None: 99 | if self.template is None: 100 | raise TemplateMissmatch("An id must be supplied if template is None.") 101 | object.__setattr__(self, 'id', self.template.id) 102 | 103 | for attr in ('attributes', 'relationships'): 104 | curval = getattr(self, attr) 105 | if curval in (None, True, False) and self.template is None: 106 | raise TemplateMissmatch(f"{attr} must be supplied if template is None") 107 | template_val = getattr(self.template, attr, frozenset()) 108 | object.__setattr__( 109 | self, attr, 110 | self._caclulate_attr_val(attr, curval, template_val, self.id) 111 | ) 112 | if self.template is not None: 113 | remainder = getattr(self, attr) - template_val 114 | if remainder: 115 | raise KeyError(f'Template does not have {attr} {remainder}') 116 | 117 | def __or__(self, other): 118 | if self.__class__ is not other.__class__: 119 | return NotImplemented 120 | if self.template != other.template: 121 | raise TemplateMissmatch("Templates must match for union/or.") 122 | return self.__class__( 123 | self.template, 124 | attributes=self.attributes | other.attributes, 125 | relationships=self.relationships | other.relationships, 126 | id=self.id | other.id, 127 | ) 128 | 129 | def __and__(self, other): 130 | if self.__class__ is not other.__class__: 131 | return NotImplemented 132 | if self.template != other.template: 133 | raise TemplateMissmatch("Templates must match for intersect/and.") 134 | return self.__class__( 135 | self.template, 136 | attributes=self.attributes & other.attributes, 137 | relationships=self.relationships & other.relationships, 138 | id=self.id & other.id, 139 | ) 140 | 141 | def __sub__(self, other): 142 | if self.__class__ is not other.__class__: 143 | return NotImplemented 144 | if self.template != other.template: 145 | raise TemplateMissmatch("Templates must match for minus.") 146 | if self.id != other.id: 147 | raise ValueError("Ids must match for minus.") 148 | return self.__class__( 149 | self.template, 150 | attributes=self.attributes - other.attributes, 151 | relationships=self.relationships - other.relationships, 152 | id=self.id, 153 | ) 154 | 155 | @classmethod 156 | def from_pfilter(cls, template, value): 157 | """ 158 | Construct a Permission object from the return value of a permission filter. 159 | """ 160 | if isinstance(value, bool): 161 | return cls(template, id=value) 162 | elif isinstance(value, Permission): 163 | return value 164 | else: 165 | raise ValueError( 166 | f"Don't know how to construct a Permission from a {type(value)}" 167 | ) 168 | 169 | @classmethod 170 | @lru_cache 171 | def from_template_cached(cls, template): 172 | """ 173 | New instance from template with default atts and rels. Cached. 
174 | """ 175 | return cls(template) 176 | 177 | @classmethod 178 | def template_from_view(cls, view): 179 | return cls( 180 | None, 181 | frozenset(view.all_attributes), 182 | frozenset(view.relationships), 183 | True, 184 | ) 185 | 186 | @classmethod 187 | def from_view(cls, view, attributes=None, relationships=None): 188 | """ 189 | New instance using a view (instance or class) to get the template. 190 | """ 191 | return cls(view.permission_template, attributes, relationships) 192 | 193 | @classmethod 194 | def subtractive(cls, template, attributes=set(), relationships=set()): 195 | """ 196 | New instance by subtracting attributes and relationships from template. 197 | """ 198 | return cls( 199 | template, 200 | attributes=template.attributes - attributes, 201 | relationships=template.relationships - relationships, 202 | ) 203 | 204 | @classmethod 205 | def from_view_subtractive(cls, view, attributes=set(), relationships=set()): 206 | """ 207 | New instance using view and subtracting atts and rels from full set. 208 | """ 209 | return cls.subtractive(view.permission_template, attributes, relationships) 210 | -------------------------------------------------------------------------------- /pyramid_jsonapi/resource.py: -------------------------------------------------------------------------------- 1 | from dataclasses import ( 2 | dataclass, 3 | ) 4 | 5 | __all__ = ( 6 | 'ResourceIndicator', 7 | ) 8 | 9 | 10 | @dataclass(eq=True, frozen=True) 11 | class ResourceIndicator: 12 | type: str 13 | id: str 14 | 15 | @classmethod 16 | def from_dict(cls, dict_): 17 | return cls(dict_['type'], dict_['id']) 18 | 19 | def to_dict(self): 20 | if self.id: 21 | return {'type': self.type, 'id': self.id} 22 | else: 23 | return None 24 | -------------------------------------------------------------------------------- /pyramid_jsonapi/serialiser.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | 3 | from typing import Sequence 4 | from pyramid_jsonapi.http_query import longest_includes, includes 5 | from pyramid_jsonapi.permissions import Targets, PermissionTarget 6 | 7 | 8 | class Serialiser: 9 | def __init__(self, view, authoriser=None) -> None: 10 | self.view = view 11 | self.authoriser = authoriser 12 | self.serialised_id_count = 0 13 | self.serialised_count = 0 14 | 15 | def serialise_item(self, item, errors=None, as_identifier=False): 16 | if item is None: 17 | return None 18 | view = self.view.view_instance(item.__class__) 19 | ser = { 20 | 'type': view.collection_name, 21 | 'id': str(view.item_id(item)) 22 | } 23 | if as_identifier: 24 | self.serialised_id_count += 1 25 | return ser 26 | perms = self.item_permissions(item) 27 | ser['attributes'] = {} 28 | ser['relationships'] = {} 29 | for attr in view.requested_attributes: 30 | if attr not in perms.attributes: 31 | continue 32 | ser['attributes'][attr] = getattr(item, attr) 33 | for rel_name, rel in view.requested_relationships.items(): 34 | if rel_name not in perms.relationships: 35 | continue 36 | ser['relationships'][rel_name] = rel_dict = {} 37 | if rel.to_many: 38 | rel_dict['data'] = [ 39 | self.serialise_item(rel_item, errors=errors, as_identifier=True) 40 | for rel_item in 41 | self.authorised_seq(getattr(item, rel_name), errors) 42 | ] 43 | else: 44 | rel_item = getattr(item, rel_name) 45 | if self.authoriser: 46 | rel_dict['data'] = self.serialise_item( 47 | self.authoriser.authorised_item(rel_item, errors), as_identifier=True 48 | ) 49 | else: 50 | 
rel_dict['data'] = self.serialise_item(rel_item, as_identifier=True) 51 | self.serialised_count += 1 52 | return ser 53 | 54 | def include(self, item, include_list, included_dict): 55 | if not include_list: 56 | return 57 | view = self.view.view_instance(item.__class__) 58 | rel_name = include_list[0] 59 | rel = view.relationships[rel_name] 60 | rel_view = view.view_instance(rel.tgt_class) 61 | rel_include_list = include_list[1:] 62 | rel_items = getattr(item, rel_name) 63 | if rel.to_one: 64 | rel_items = [rel_items] 65 | for rel_item in rel_items: 66 | if rel_item is None: 67 | continue 68 | ref_tuple = (rel_view.collection_name, str(getattr(rel_item, rel_view.key_column.name))) 69 | if ref_tuple not in included_dict: 70 | included_dict[ref_tuple] = rel_item 71 | if rel_include_list: 72 | self.include(rel_item, rel_include_list, included_dict) 73 | 74 | def item_permissions(self, item): 75 | if self.authoriser: 76 | return self.authoriser.item_permissions(item) 77 | return self.view.view_instance(item.__class__).permission_all 78 | 79 | def authorised_seq(self, seq, errors): 80 | if self.authoriser: 81 | return self.authoriser.iterate_authorised_items(seq, errors) 82 | return seq 83 | 84 | def serialise(self, data, limit, available=None, errors=None): 85 | ser = wf.Doc() 86 | included_dict = {} 87 | self.serialised_id_count = 0 88 | self.serialised_count = 0 89 | if isinstance(data, Sequence): 90 | many = True 91 | my_data = data 92 | else: 93 | many = False 94 | my_data = [data] 95 | ser_data = [self.serialise_item(item, errors) for item in my_data] 96 | if many: 97 | ser['data'] = ser_data 98 | else: 99 | ser['data'] = ser_data[0] 100 | for item in my_data: 101 | for inc in longest_includes(includes(self.view.request)): 102 | self.include(item, inc, included_dict) 103 | ser['included'] = [ 104 | self.serialise_item(o) for o in self.authorised_seq(included_dict.values(), errors) 105 | ] 106 | ser['meta'] = { 107 | 'serialised_count': self.serialised_count, 108 | 'serialised_id_count': self.serialised_id_count, 109 | 'rejected': errors, 110 | } 111 | ser['meta'].update( 112 | { 113 | 'results': { 114 | 'available': available, 115 | 'limit': limit, 116 | # 'returned': len(self.objects) 117 | } 118 | } 119 | ) 120 | if many: 121 | ser['links'] = links = {} 122 | if self.view.query_info.paging_info.start_type == 'offset': 123 | links.update(self.offset_pagination_links(available)) 124 | elif self.view.query_info.paging_info.is_relative: 125 | links.update(self.before_after_pagination_links(my_data)) 126 | return ser 127 | 128 | def offset_pagination_links(self, count): 129 | links = {} 130 | req = self.view.request 131 | route_name = req.matched_route.name 132 | qinfo = self.view.query_info 133 | _query = {'page[limit]': qinfo.paging_info.limit} 134 | _query['sort'] = ','.join(qi.value for qi in qinfo.sorting_info) 135 | if req.params.get('include'): 136 | _query['include'] = req.params.get('include') 137 | for finfo in qinfo.field_info: 138 | _query[finfo.key] = finfo.val 139 | for filtr in qinfo.filter_info: 140 | _query[filtr.pname] = filtr.value 141 | 142 | # First link. 143 | links['first'] = req.route_url( 144 | route_name, _query={**_query, 'page[offset]': 0}, **req.matchdict 145 | ) 146 | 147 | # Next link. 
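        # Illustrative arithmetic (hypothetical numbers): with page[limit]=10
        # and page[offset]=20 the 'next' link uses offset 30, 'prev' uses
        # offset 10, and 'last' rounds the final offset down to a multiple of
        # the limit.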
148 | next_offset = qinfo.paging_info.offset + qinfo.paging_info.limit 149 | if count is None or next_offset < count: 150 | _query['page[offset]'] = next_offset 151 | links['next'] = req.route_url( 152 | route_name, _query=_query, **req.matchdict 153 | ) 154 | 155 | # Previous link. 156 | if qinfo.paging_info.offset > 0: 157 | prev_offset = qinfo.paging_info.offset - qinfo.paging_info.limit 158 | if prev_offset < 0: 159 | prev_offset = 0 160 | _query['page[offset]'] = prev_offset 161 | links['prev'] = req.route_url( 162 | route_name, _query=_query, **req.matchdict 163 | ) 164 | 165 | # Last link. 166 | if count is not None: 167 | _query['page[offset]'] = ( 168 | max((count - 1), 0) // 169 | qinfo.paging_info.limit 170 | ) * qinfo.paging_info.limit 171 | links['last'] = req.route_url( 172 | route_name, _query=_query, **req.matchdict 173 | ) 174 | 175 | return links 176 | 177 | def before_after_pagination_links(self, data): 178 | links = {} 179 | req = self.view.request 180 | route_name = req.matched_route.name 181 | qinfo = self.view.query_info 182 | _query = {'page[limit]': qinfo.paging_info.limit} 183 | _query['sort'] = ','.join(str(qi) for qi in qinfo.sorting_info) 184 | for filtr in qinfo.filter_info: 185 | _query[filtr.pname] = filtr.value 186 | if req.params.get('include'): 187 | _query['include'] = req.params.get('include') 188 | for finfo in qinfo.field_info: 189 | _query[finfo.key] = finfo.val 190 | for filtr in qinfo.filter_info: 191 | _query[filtr.pname] = filtr.value 192 | 193 | # First link. 194 | links['first'] = req.route_url( 195 | route_name, _query={**_query, 'page[first]': 1}, **req.matchdict 196 | ) 197 | 198 | # Previous link. 199 | # vals = [] 200 | # for sinfo in qinfo.sorting_info: 201 | # val = self.objects[0].object 202 | # for col in sinfo.colspec: 203 | # val = getattr(val, col) 204 | # vals.append(str(val)) 205 | # _query['page[before]'] = ','.join(vals) 206 | _query_prev = None 207 | if data: 208 | _query_prev = {**_query, 'page[before_id]': str(self.view.item_id(data[0]))} 209 | else: 210 | if qinfo.paging_info.start_type in ('after', 'after_id', 'last'): 211 | # off the end of a list of pages. Link to last page. 212 | _query_prev = {**_query, 'page[last]': 1} 213 | # Otherwise either an empty search (no prev or next) or before beginning (no prev) 214 | if qinfo.paging_info.start_type == 'first': 215 | _query_prev = None 216 | if _query_prev: 217 | links['prev'] = req.route_url( 218 | route_name, _query=_query_prev, **req.matchdict 219 | ) 220 | 221 | # Next link. 222 | # vals = [] 223 | # for sinfo in qinfo.sorting_info: 224 | # val = self.objects[-1].object 225 | # for col in sinfo.colspec: 226 | # val = getattr(val, col) 227 | # vals.append(str(val)) 228 | # _query['page[after]'] = ','.join(vals) 229 | _query_next = None 230 | if data: 231 | _query_next = {**_query, 'page[after_id]': str(self.view.item_id(data[-1]))} 232 | else: 233 | if qinfo.paging_info.start_type in ('before', 'before_id', 'first'): 234 | # before beginning of a list of pages. Link to first page. 235 | _query_next = {**_query, 'page[first]': 1} 236 | # Otherwise either an empty search (no prev or next) or after end (no next) 237 | if qinfo.paging_info.start_type == 'last': 238 | _query_next = None 239 | if _query_next: 240 | links['next'] = req.route_url( 241 | route_name, _query=_query_next, **req.matchdict 242 | ) 243 | 244 | # Last link. 
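        # A 'last' link is always emitted; page[last]=1 is resolved by the
        # paging code by reversing the sort order and serving the first page.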
245 | _query['page[last]'] = '1' 246 | _query_last = {**_query, 'page[last]': 1} 247 | links['last'] = req.route_url( 248 | route_name, _query=_query_last, **req.matchdict 249 | ) 250 | 251 | return links 252 | -------------------------------------------------------------------------------- /pyramid_jsonapi/unit_tests.py: -------------------------------------------------------------------------------- 1 | # Standard library imports. 2 | from dataclasses import ( 3 | dataclass, 4 | ) 5 | from typing import ( 6 | FrozenSet, 7 | ) 8 | import unittest 9 | 10 | # Third party imports. 11 | 12 | # App imports. 13 | # from pyramid_jsonapi.collection_view import ( 14 | # CollectionViewBase, 15 | # ) 16 | from pyramid_jsonapi.permissions import ( 17 | Permission, 18 | TemplateMissmatch, 19 | ) 20 | 21 | 22 | @dataclass 23 | class TestView: 24 | 25 | all_attributes: FrozenSet[str] 26 | relationships: FrozenSet[str] 27 | 28 | 29 | class Permissions(unittest.TestCase): 30 | 31 | def setUp(self): 32 | self.t = Permission(None, {'a1', 'a2', 'a3'}, {'r1', 'r2', 'r3'}, True) 33 | 34 | def test_template_from_view(self): 35 | v = TestView({'a1', 'a2', 'a3'}, {'r1', 'r2', 'r3'}) # pylint:disable=too-many-function-args 36 | t = Permission.template_from_view(v) 37 | self.assertEqual(t, self.t) 38 | 39 | def test_create_no_template(self): 40 | with self.assertRaises(TemplateMissmatch) as cm: 41 | Permission(None, {}, {}) 42 | self.assertTrue(cm.exception.startswith("An id")) 43 | with self.assertRaises(TemplateMissmatch) as cm: 44 | Permission(None, None, {}, True) 45 | self.assertTrue(cm.exception.startswith("attributes")) 46 | with self.assertRaises(TemplateMissmatch) as cm: 47 | Permission(None, {}, None, True) 48 | self.assertTrue(cm.exception.startswith("relationships")) 49 | 50 | def test_create_empty(self): 51 | p = Permission(self.t) 52 | self.assertEqual(p.attributes, self.t.attributes) 53 | self.assertEqual(p.relationships, self.t.relationships) 54 | 55 | def test_create_with_attributes(self): 56 | p = Permission(self.t, attributes={'a1'}) 57 | self.assertEqual(p.attributes, {'a1'}) 58 | self.assertEqual(p.relationships, self.t.relationships) 59 | 60 | def test_create_with_rels(self): 61 | p = Permission(self.t, relationships={'r1'}) 62 | self.assertEqual(p.attributes, self.t.attributes) 63 | self.assertEqual(p.relationships, {'r1'}) 64 | 65 | def test_create_with_false_id(self): 66 | p = Permission(self.t, id=False) 67 | self.assertEqual(p.attributes, set()) 68 | self.assertEqual(p.relationships, set()) 69 | p = Permission(self.t, id=False, attributes={'a1'}) 70 | self.assertEqual(p.attributes, {'a1'}) 71 | 72 | def test_create_subtractive(self): 73 | self.assertEqual( 74 | Permission.subtractive(self.t, {'a3'}, {'r3'}), 75 | Permission(self.t, {'a1', 'a2'}, {'r1', 'r2'}) 76 | ) 77 | 78 | def test_create_with_bool_attributes(self): 79 | self.assertEqual( 80 | self.t.attributes, Permission(self.t, True, True).attributes 81 | ) 82 | self.assertEqual( 83 | Permission(self.t, set(), set()), 84 | Permission(self.t, False, False) 85 | ) 86 | 87 | def test_create_with_incorrect_atts(self): 88 | with self.assertRaises(KeyError): 89 | Permission(self.t, attributes={'bad'}) 90 | 91 | def test_create_with_incorrect_rels(self): 92 | with self.assertRaises(KeyError): 93 | Permission(self.t, relationships={'bad'}) 94 | 95 | def test_op_or(self): 96 | p1 = Permission(self.t, {'a1'}, {'r1'}, True) 97 | p2 = Permission(self.t, {'a2'}, {'r2'}, False) 98 | por = p1 | p2 99 | self.assertEqual(por, Permission(self.t, 
{'a1', 'a2'}, {'r1', 'r2'}, True)) 100 | with self.assertRaises(TypeError) as cm: 101 | p1 | list() 102 | with self.assertRaises(TemplateMissmatch) as cm: 103 | p1 | Permission(p2) 104 | 105 | def test_op_and(self): 106 | p1 = Permission(self.t, {'a1', 'a2'}, {'r1', 'r2'}, True) 107 | p2 = Permission(self.t, {'a2'}, {'r2'}, False) 108 | pand = p1 & p2 109 | self.assertEqual(pand, Permission(self.t, {'a2'}, {'r2'}, False)) 110 | with self.assertRaises(TypeError) as cm: 111 | p1 & list() 112 | with self.assertRaises(TemplateMissmatch) as cm: 113 | p1 & Permission(p2) 114 | 115 | def test_op_sub(self): 116 | p1 = Permission(self.t, {'a1', 'a2'}, {'r1', 'r2'}, True) 117 | p2 = Permission(self.t, {'a2'}, {'r2'}, True) 118 | psub = p1 - p2 119 | self.assertEqual(psub, Permission(self.t, {'a1'}, {'r1'}, True)) 120 | with self.assertRaises(TypeError) as cm: 121 | p1 - list() 122 | with self.assertRaises(TemplateMissmatch) as cm: 123 | p1 - Permission(p2) 124 | with self.assertRaises(ValueError) as cm: 125 | p1 - Permission(self.t, {'a2'}, {'r2'}, False) 126 | -------------------------------------------------------------------------------- /pyramid_jsonapi/version.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Generate version information from git tags, archive info, or PKG-INFO. 3 | 4 | Based on: https://github.com/Changaco/version.py 5 | 6 | """ 7 | 8 | from os.path import dirname, isdir, isfile, join 9 | import re 10 | from subprocess import CalledProcessError, check_output 11 | import pkg_resources 12 | 13 | 14 | def get_version(): 15 | """Get version in any way possible.""" 16 | 17 | prefix = '' 18 | tag_re = re.compile(r'\btag: %s([0-9][^,]*)\b' % prefix) 19 | version_re = re.compile('^Version: (.+)$', re.M) 20 | 21 | # Return the version if it has been injected into the file by git-archive 22 | version = tag_re.search('HEAD -> master, tag: 2.2.13') 23 | if version: 24 | return version.group(1) 25 | 26 | project_dir = dirname(__file__) 27 | pkg_file = join(project_dir, '../PKG-INFO') 28 | 29 | if isdir(join(project_dir, '../.git')): 30 | # Get the version using "git describe". 31 | cmd = 'git -C %s describe --tags --match %s[0-9]* --dirty' % (project_dir, prefix) 32 | try: 33 | version = check_output(cmd.split()).decode().strip()[len(prefix):] 34 | except CalledProcessError: 35 | raise RuntimeError('Unable to get version number from git tags') 36 | 37 | # PEP 440 compatibility 38 | if '-' in version: 39 | version = '.dev'.join(version.split('-')[:2]) 40 | return version 41 | 42 | if isfile(pkg_file): 43 | # Extract the version from the PKG-INFO file. 
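        # For illustration: a PKG-INFO line such as "Version: 2.2.13" yields
        # "2.2.13" via version_re above.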
44 | with open(pkg_file) as info_f: 45 | return version_re.search(info_f.read()).group(1) 46 | 47 | # Package was installed (os pkg, pip etc) without PKG-INFO 48 | return pkg_resources.get_distribution('pyramid-jsonapi').version 49 | 50 | 51 | if __name__ == '__main__': 52 | print(get_version()) 53 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/__init__.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from functools import ( 5 | partial 6 | ) 7 | from itertools import ( 8 | islice, 9 | ) 10 | from pyramid.httpexceptions import ( 11 | HTTPBadRequest, 12 | HTTPForbidden, 13 | HTTPNotFound, 14 | ) 15 | from sqlalchemy.orm.interfaces import ( 16 | ONETOMANY, 17 | MANYTOMANY, 18 | MANYTOONE 19 | ) 20 | 21 | from pyramid_jsonapi.permissions import ( 22 | PermissionTarget, 23 | Targets, 24 | ) 25 | 26 | stages = ( 27 | 'alter_query', 28 | 'alter_related_query', 29 | 'alter_result', 30 | 'before_write_item', 31 | ) 32 | 33 | 34 | def get_one_altered_result_object(view, stages, query): 35 | res_obj = wf.execute_stage( 36 | view, stages, 'alter_result', 37 | wf.ResultObject(view, view.get_one(query, view.not_found_message)) 38 | ) 39 | if res_obj.tuple_identifier in view.pj_shared.rejected.rejected['objects']: 40 | raise HTTPForbidden(view.not_found_message) 41 | return res_obj 42 | 43 | 44 | def altered_objects_iterator(view, stages, stage_name, objects_iterable): 45 | """ 46 | Return an iterator of objects from objects_iterable filtered and altered by 47 | the stage_name stage. 48 | """ 49 | return filter( 50 | lambda o: o.tuple_identifier not in view.pj_shared.rejected.rejected['objects'], 51 | map( 52 | partial(wf.execute_stage, view, stages, stage_name), 53 | (wf.ResultObject(view, o) for o in objects_iterable) 54 | ) 55 | ) 56 | 57 | 58 | def get_related(obj, rel_name, stages, include_path=None): 59 | """ 60 | Get the objects related to obj via the relationship rel_name. 
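
    Args:
        obj (ResultObject): the object whose relationship is being followed.
        rel_name (str): name of the relationship on obj's view.
        stages: stage definitions passed through to wf.execute_stage.
        include_path (list, optional): relationship names already followed,
            used to decide whether the related resources should be included.

    Returns:
        Results: the related objects, filtered and altered by 'alter_result'.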
61 | """ 62 | view = obj.view 63 | include_path = include_path or [] 64 | rel_include_path = include_path + [rel_name] 65 | rel = view.relationships[rel_name] 66 | rel_view = view.view_instance(rel.tgt_class) 67 | many = rel.direction is ONETOMANY or rel.direction is MANYTOMANY 68 | is_included = view.path_is_included(rel_include_path) 69 | if rel.queryable: 70 | query = view.related_query(obj.object, rel, full_object=is_included) 71 | # print(query) 72 | query = wf.execute_stage( 73 | view, stages, 'alter_related_query', query 74 | ) 75 | # print('*' * 80) 76 | # print(rel_name) 77 | # print(query.statement.compile(view.dbsession.bind)) 78 | # print('*' * 80) 79 | objects_iterable = wf.wrapped_query_all(query) 80 | else: 81 | objects_iterable = getattr(obj.object, rel_name) 82 | if not many: 83 | objects_iterable = [objects_iterable] 84 | rel_objs = list( 85 | islice( 86 | altered_objects_iterator( 87 | rel_view, stages, 88 | 'alter_result', 89 | objects_iterable, 90 | ), 91 | view.related_limit(rel) 92 | ) 93 | ) 94 | rel_results = wf.Results( 95 | rel_view, 96 | objects=rel_objs, 97 | many=many, 98 | is_included=is_included 99 | ) 100 | if is_included: 101 | for rel_obj in rel_results.objects: 102 | for rel_rel_name in rel_obj.view.relationships: 103 | if wf.follow_rel(rel_obj.view, rel_rel_name, include_path=rel_include_path): 104 | rel_obj.related[rel_rel_name] = get_related( 105 | rel_obj, 106 | rel_rel_name, 107 | stages, 108 | include_path=rel_include_path 109 | ) 110 | if many: 111 | rel_results.limit = view.related_limit(rel) 112 | return rel_results 113 | 114 | 115 | def fill_result_object_related(res_obj, stages): 116 | view = res_obj.view 117 | for rel_name in view.relationships: 118 | if wf.follow_rel(view, rel_name): 119 | res_obj.related[rel_name] = get_related( 120 | res_obj, rel_name, stages 121 | ) 122 | 123 | 124 | def shp_get_item_alter_result(obj, view, stage, view_method): 125 | reason = "Permission denied." 
126 | item_pf = view.permission_filter('get', Targets.item, stage) 127 | # pred = view.permission_to_dict(predicate(obj)) 128 | pred = item_pf(obj, PermissionTarget(Targets.item)) 129 | if not pred.id: 130 | view.pj_shared.rejected.reject_object(obj.tuple_identifier, reason) 131 | 132 | reject_atts = obj.attribute_mask - pred.attributes 133 | obj.attribute_mask &= pred.attributes 134 | # record rejected atts 135 | view.pj_shared.rejected.reject_attributes( 136 | obj.tuple_identifier, 137 | reject_atts, 138 | reason, 139 | ) 140 | 141 | rel_pf = view.permission_filter('get', Targets.relationship, stage) 142 | reject_rels = { 143 | rel for rel in obj.rel_mask 144 | if not rel_pf(obj, PermissionTarget(Targets.relationship, rel)) 145 | } 146 | obj.rel_mask -= reject_rels 147 | # record rejected rels 148 | view.pj_shared.rejected.reject_relationships( 149 | obj.tuple_identifier, 150 | reject_rels, 151 | reason, 152 | ) 153 | return obj 154 | 155 | 156 | def permission_handler(endpoint_name, stage_name): 157 | handlers = { 158 | 'item_get': { 159 | 'alter_result': shp_get_item_alter_result, 160 | }, 161 | 'collection_get': { 162 | 'alter_result': shp_get_item_alter_result, 163 | }, 164 | 'related_get': { 165 | 'alter_result': shp_get_item_alter_result, 166 | }, 167 | 'relationships_get': { 168 | 'alter_result': shp_get_item_alter_result, 169 | }, 170 | } 171 | # for ep in ('collection_get', 'related_get', 'relationships_get'): 172 | # handlers[ep] = handlers['item_get'] 173 | return handlers[endpoint_name][stage_name] 174 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/collection_get.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from itertools import islice 5 | from pyramid.httpexceptions import ( 6 | HTTPBadRequest, 7 | HTTPInternalServerError, 8 | ) 9 | from . import stages 10 | from ...http_query import QueryInfo 11 | 12 | 13 | def workflow(view, stages): 14 | qinfo = view.query_info 15 | pinfo = qinfo.paging_info 16 | count = None 17 | 18 | query = view.base_collection_query() 19 | query_reversed = False 20 | if pinfo.start_type in ('last', 'before'): 21 | # These start types need to fetch records backwards (relative to their 22 | # nominal sort order) and reverse them before serialising. 23 | query_reversed = True 24 | query = view.query_add_sorting(query, reversed=query_reversed) 25 | query = view.query_add_filtering(query) 26 | 27 | if pinfo.start_type in ('after', 'before'): 28 | if qinfo.pj_include_count: 29 | count = full_search_count(view, stages) 30 | 31 | # We just add filters here. The necessary joins will have been done by the 32 | # Sorting that after relies on. 33 | # Need >= or <= on all but the last prop. 34 | for sinfo, after in zip(qinfo.sorting_info[:-1], pinfo.page_start[:-1]): 35 | ascending = not sinfo.ascending if query._pj_reversed else sinfo.ascending 36 | if ascending: 37 | query = query.filter(sinfo.prop >= after) 38 | else: 39 | query = query.filter(sinfo.prop <= after) 40 | # And > or < on the last one. 
41 | ascending = qinfo.sorting_info[-1].ascending 42 | ascending = not ascending if query._pj_reversed else ascending 43 | if ascending: 44 | query = query.filter(qinfo.sorting_info[-1].prop > pinfo.page_start[-1]) 45 | else: 46 | query = query.filter(qinfo.sorting_info[-1].prop < pinfo.page_start[-1]) 47 | 48 | query = wf.execute_stage( 49 | view, stages, 'alter_query', query 50 | ) 51 | 52 | # Get the direct results from this collection (no related objects yet). 53 | # Stage 'alter_result' will run on each object. 54 | objects_iterator = wf.loop.altered_objects_iterator( 55 | view, stages, 'alter_result', wf.wrapped_query_all(query) 56 | ) 57 | # Only do paging the slow way if page[offset] is explicitly specified in the 58 | # request. 59 | offset_count = 0 60 | if pinfo.start_type == 'offset': 61 | offset_count = sum(1 for _ in islice(objects_iterator, pinfo.offset)) 62 | objects = list(islice(objects_iterator, pinfo.limit)) 63 | if query_reversed: 64 | objects.reverse() 65 | if pinfo.start_type in ('offset', None) and qinfo.pj_include_count: 66 | count = offset_count + len(objects) + sum(1 for _ in objects_iterator) 67 | results = wf.Results( 68 | view, 69 | objects=objects, 70 | many=True, 71 | is_top=True, 72 | count=count, 73 | limit=pinfo.limit 74 | ) 75 | 76 | # Fill the relationships with related objects. 77 | # Stage 'alter_result' will run on each object. 78 | for res_obj in results.objects: 79 | wf.loop.fill_result_object_related(res_obj, stages) 80 | 81 | return results.serialise() 82 | 83 | 84 | def full_search_count(view, stages): 85 | # Same as normal query but only id column and don't bother with sorting. 86 | query = view.base_collection_query(loadonly=[view.key_column.name]) 87 | query = view.query_add_filtering(query) 88 | query = wf.execute_stage( 89 | view, stages, 'alter_query', query 90 | ) 91 | objects_iterator = wf.loop.altered_objects_iterator( 92 | view, stages, 'alter_result', wf.wrapped_query_all(query) 93 | ) 94 | return sum(1 for _ in objects_iterator) 95 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/collection_post.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from collections.abc import Sequence 5 | 6 | from .item_get import ( 7 | get_doc, 8 | ) 9 | 10 | from sqlalchemy.orm.interfaces import ( 11 | ONETOMANY, 12 | MANYTOMANY, 13 | MANYTOONE 14 | ) 15 | 16 | from pyramid.httpexceptions import ( 17 | HTTPBadRequest, 18 | HTTPForbidden, 19 | HTTPConflict, 20 | HTTPNotFound, 21 | ) 22 | from . 
import stages 23 | 24 | 25 | def workflow(view, stages): 26 | try: 27 | data = view.request.json_body['data'] 28 | except KeyError: 29 | raise HTTPBadRequest('data attribute required in POSTs.') 30 | 31 | if not isinstance(data, dict): 32 | raise HTTPBadRequest('data attribute must contain a single resource object.') 33 | 34 | # Check to see if we're allowing client ids 35 | if not view.api.settings.allow_client_ids and 'id' in data: 36 | raise HTTPForbidden('Client generated ids are not supported.') 37 | # Type should be correct or raise 409 Conflict 38 | datatype = data.get('type') 39 | if datatype != view.collection_name: 40 | raise HTTPConflict("Unsupported type '{}'".format(datatype)) 41 | try: 42 | atts = data['attributes'] 43 | except KeyError: 44 | atts = {} 45 | if 'id' in data: 46 | atts[view.model.__pyramid_jsonapi__['id_col_name']] = data['id'] 47 | item = view.model(**atts) 48 | with view.dbsession.no_autoflush: 49 | for relname, reldict in data.get('relationships', {}).items(): 50 | try: 51 | reldata = reldict['data'] 52 | except KeyError: 53 | raise HTTPBadRequest( 54 | 'relationships within POST must have data member' 55 | ) 56 | try: 57 | rel = view.relationships[relname] 58 | except KeyError: 59 | raise HTTPNotFound( 60 | 'No relationship {} in collection {}'.format( 61 | relname, 62 | view.collection_name 63 | ) 64 | ) 65 | rel_type = view.api.view_classes[rel.tgt_class].collection_name 66 | if rel.direction is ONETOMANY or rel.direction is MANYTOMANY: 67 | # reldata should be a list/array 68 | if not isinstance(reldata, Sequence) or isinstance(reldata, str): 69 | raise HTTPBadRequest( 70 | 'Relationship data should be an array for TOMANY relationships.' 71 | ) 72 | rel_items = [] 73 | for rel_identifier in reldata: 74 | if rel_identifier.get('type') != rel_type: 75 | raise HTTPConflict( 76 | 'Relationship identifier has type {} and should be {}'.format( 77 | rel_identifier.get('type'), rel_type 78 | ) 79 | ) 80 | try: 81 | rel_items.append(view.dbsession.query(rel.tgt_class).get(rel_identifier['id'])) 82 | except KeyError: 83 | raise HTTPBadRequest( 84 | 'Relationship identifier must have an id member' 85 | ) 86 | setattr(item, relname, rel_items) 87 | else: 88 | if (not isinstance(reldata, dict)) and (reldata is not None): 89 | raise HTTPBadRequest( 90 | 'Relationship data should be a resource identifier object or null.' 91 | ) 92 | if reldata.get('type') != rel_type: 93 | raise HTTPConflict( 94 | 'Relationship identifier has type {} and should be {}'.format( 95 | reldata.get('type'), rel_type 96 | ) 97 | ) 98 | try: 99 | setattr( 100 | item, 101 | relname, 102 | view.dbsession.query(rel.tgt_class).get(reldata['id']) 103 | ) 104 | except KeyError: 105 | raise HTTPBadRequest( 106 | 'No id member in relationship data.' 107 | ) 108 | item = wf.execute_stage( 109 | view, stages, 'before_write_item', item 110 | ) 111 | try: 112 | view.dbsession.add(item) 113 | view.dbsession.flush() 114 | except sqlalchemy.exc.IntegrityError as exc: 115 | raise HTTPConflict(exc.args[0]) 116 | view.request.response.status_code = 201 117 | item_id = view.id_col(item) 118 | view.request.response.headers['Location'] = view.request.route_url( 119 | view.api.endpoint_data.make_route_name(view.collection_name, suffix='item'), 120 | **{'id': item_id} 121 | ) 122 | 123 | # The rest of this is more or less a get. 
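# Sketch of the eventual response for a hypothetical POST /people, assuming
# default settings (the 201 status and Location header were set just above;
# the body comes from re-reading the new item through the item_get stages below):
#
#     HTTP/1.1 201 Created
#     Location: .../people/42
#
#     {"data": {"type": "people", "id": "42", "attributes": {...}, ...}}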
124 | return get_doc( 125 | view, getattr(view, 'item_get').stages, view.single_item_query(item_id) 126 | ) 127 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/item_delete.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from pyramid.httpexceptions import ( 5 | HTTPFailedDependency, 6 | ) 7 | from . import stages 8 | 9 | 10 | def workflow(view, stages): 11 | item = view.get_one( 12 | view.single_item_query(loadonly=[view.key_column.name]), 13 | not_found_message='No item {} in collection {}'.format( 14 | view.obj_id, view.collection_name 15 | ) 16 | ) 17 | item = wf.execute_stage( 18 | view, stages, 'before_write_item', item 19 | ) 20 | try: 21 | view.dbsession.delete(item) 22 | view.dbsession.flush() 23 | except sqlalchemy.exc.IntegrityError as exc: 24 | raise HTTPFailedDependency(str(exc)) 25 | doc = wf.Doc() 26 | doc['data'] = wf.ResultObject(view, item).identifier() 27 | return doc 28 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/item_get.py: -------------------------------------------------------------------------------- 1 | from pyramid.httpexceptions import ( 2 | HTTPForbidden, 3 | HTTPNotFound, 4 | ) 5 | import pyramid_jsonapi.workflow as wf 6 | from . import stages 7 | 8 | 9 | def get_doc(view, stages, query): 10 | query = wf.execute_stage( 11 | view, stages, 'alter_query', query 12 | ) 13 | res_obj = wf.loop.get_one_altered_result_object(view, stages, query) 14 | results = view.pj_shared.results = wf.Results( 15 | view, 16 | objects=[res_obj], 17 | many=False, 18 | is_top=True, 19 | not_found_message=view.not_found_message, 20 | ) 21 | 22 | # We have a result but we still need to fill the relationships. 23 | # Stage 'alter_result' will run on each related object. 24 | wf.loop.fill_result_object_related(res_obj, stages) 25 | 26 | return results.serialise() 27 | 28 | 29 | def workflow(view, stages): 30 | return get_doc(view, stages, view.single_item_query()) 31 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/item_patch.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import pyramid_jsonapi.workflow as wf 3 | import sqlalchemy 4 | 5 | from json.decoder import ( 6 | JSONDecodeError, 7 | ) 8 | from pyramid.httpexceptions import ( 9 | HTTPBadRequest, 10 | HTTPConflict, 11 | HTTPNotFound, 12 | ) 13 | from psycopg2.errors import ( # pylint: disable=no-name-in-module 14 | CheckViolation, 15 | ) 16 | from sqlalchemy.orm import ( 17 | load_only, 18 | ) 19 | from . 
import stages 20 | from .item_get import ( 21 | get_doc, 22 | ) 23 | 24 | 25 | def workflow(view, stages): 26 | validate_patch_request(view) 27 | data = view.request.json_body['data'] 28 | atts = {} 29 | hybrid_atts = {} 30 | for key, value in data.get('attributes', {}).items(): 31 | if key in view.attributes: 32 | atts[key] = value 33 | elif key in view.hybrid_attributes: 34 | hybrid_atts[key] = value 35 | else: 36 | raise HTTPNotFound( 37 | 'Collection {} has no attribute {}'.format( 38 | view.collection_name, key 39 | ) 40 | ) 41 | atts[view.key_column.name] = view.obj_id 42 | item = view.dbsession.merge(view.model(**atts)) 43 | for att, value in hybrid_atts.items(): 44 | try: 45 | setattr(item, att, value) 46 | except AttributeError: 47 | raise HTTPConflict( 48 | 'Attribute {} is read only.'.format( 49 | att 50 | ) 51 | ) 52 | 53 | rels = data.get('relationships', {}) 54 | for relname, reldict in rels.items(): 55 | try: 56 | rel = view.relationships[relname] 57 | except KeyError: 58 | raise HTTPNotFound( 59 | 'Collection {} has no relationship {}'.format( 60 | view.collection_name, relname 61 | ) 62 | ) 63 | rel_view = view.view_instance(rel.tgt_class) 64 | try: 65 | reldata = reldict['data'] 66 | except KeyError: 67 | raise HTTPBadRequest( 68 | "Relationship '{}' has no 'data' member.".format(relname) 69 | ) 70 | except TypeError: 71 | raise HTTPBadRequest( 72 | "Relationship '{}' is not a dictionary with a data member.".format(relname) 73 | ) 74 | if reldata is None: 75 | setattr(item, relname, None) 76 | elif isinstance(reldata, dict): 77 | if reldata.get('type') != rel_view.collection_name: 78 | raise HTTPConflict( 79 | 'Type {} does not match relationship type {}'.format( 80 | reldata.get('type', None), rel_view.collection_name 81 | ) 82 | ) 83 | if reldata.get('id') is None: 84 | raise HTTPBadRequest( 85 | 'An id is required in a resource identifier.' 86 | ) 87 | rel_item = view.dbsession.query( 88 | rel.tgt_class 89 | ).options( 90 | load_only(rel_view.key_column.name) 91 | ).get(reldata['id']) 92 | if not rel_item: 93 | raise HTTPNotFound('{}/{} not found'.format( 94 | rel_view.collection_name, reldata['id'] 95 | )) 96 | setattr(item, relname, rel_item) 97 | elif isinstance(reldata, list): 98 | rel_items = [] 99 | for res_ident in reldata: 100 | rel_item = view.dbsession.query( 101 | rel.tgt_class 102 | ).options( 103 | load_only(rel_view.key_column.name) 104 | ).get(res_ident['id']) 105 | if not rel_item: 106 | raise HTTPNotFound('{}/{} not found'.format( 107 | rel_view.collection_name, res_ident['id'] 108 | )) 109 | rel_items.append(rel_item) 110 | setattr(item, relname, rel_items) 111 | item = wf.execute_stage( 112 | view, stages, 'before_write_item', item 113 | ) 114 | try: 115 | view.dbsession.flush() 116 | except sqlalchemy.exc.IntegrityError as exc: 117 | if isinstance(exc.orig, CheckViolation): 118 | # Use a friendlier exception message for check constraint violations and show the 119 | # constraint definition. 
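# Sketch of what the introspection below returns, assuming the test model
# Blog with its table-level CheckConstraint("title != 'forbidden title'"):
# get_check_constraints() yields dicts roughly like
#
#     {'name': '<db-generated name>', 'sqltext': "title <> 'forbidden title'::text"}
#
# and the matching 'sqltext' is appended to the 409 Conflict message so the
# client can see which constraint it violated.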
120 | insp = sqlalchemy.inspect(view.dbsession.get_bind()) 121 | cons = insp.get_check_constraints(view.model.__table__.name) 122 | sqltext = '' 123 | for con in cons: 124 | if con.get('name', '') == exc.orig.diag.constraint_name: 125 | sqltext = con.get('sqltext', '') 126 | break 127 | raise HTTPConflict(str(exc.orig.diag.message_primary) + f'\n\nconstraint sql: {sqltext}') 128 | else: 129 | raise HTTPConflict(str(exc)) 130 | doc = get_doc( 131 | view, getattr(view, 'item_get').stages, view.single_item_query(view.obj_id) 132 | ) 133 | doc['meta'] = { 134 | 'updated': { 135 | 'attributes': [ 136 | att for att in itertools.chain(atts, hybrid_atts) 137 | if att != view.key_column.name 138 | ], 139 | 'relationships': [r for r in rels] 140 | } 141 | } 142 | # if an update is successful ... the server 143 | # responds only with top-level meta data 144 | return doc 145 | 146 | 147 | def validate_patch_request(view): 148 | request = view.request 149 | try: 150 | data = request.json_body['data'] 151 | except KeyError: 152 | raise HTTPBadRequest('data attribute required in PATCHes.') 153 | except JSONDecodeError as exc: 154 | raise HTTPBadRequest('Error decoding JSON body: {}.'.format(exc)) 155 | data_id = data.get('id') 156 | if view.collection_name != data.get('type'): 157 | raise HTTPConflict( 158 | 'JSON type ({}) does not match URL type ({}).'.format( 159 | data.get('type'), view.collection_name 160 | ) 161 | ) 162 | if data_id != view.obj_id: 163 | raise HTTPConflict( 164 | 'JSON id ({}) does not match URL id ({}).'.format( 165 | data_id, view.obj_id 166 | ) 167 | ) 168 | if not view.object_exists(view.obj_id): 169 | raise HTTPNotFound( 170 | 'No id {} in collection {}'.format( 171 | view.obj_id, 172 | view.collection_name 173 | ) 174 | ) 175 | return request 176 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/related_get.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy 2 | import pyramid_jsonapi.workflow as wf 3 | 4 | from itertools import ( 5 | islice, 6 | ) 7 | from pyramid.httpexceptions import ( 8 | HTTPInternalServerError, 9 | HTTPBadRequest, 10 | ) 11 | from sqlalchemy.orm.interfaces import ( 12 | ONETOMANY, 13 | MANYTOMANY, 14 | MANYTOONE, 15 | ) 16 | from . import stages 17 | 18 | 19 | def get_results(view, stages): 20 | qinfo = view.rel_view.collection_query_info(view.request) 21 | rel_stages = getattr(view.rel_view, 'related_get').stages 22 | limit = qinfo['page[limit]'] 23 | count = None 24 | # We will need the original object with id view.obj_id. 
25 | obj = wf.loop.get_one_altered_result_object( 26 | view, stages, view.single_item_query() 27 | ) 28 | if view.rel.queryable: 29 | query = view.related_query(obj.object, view.rel) 30 | else: 31 | rel_objs = getattr(obj.object, view.rel.name) 32 | # rel_objs = getattr(obj.object, view.rel.name) 33 | 34 | if view.rel.direction is ONETOMANY or view.rel.direction is MANYTOMANY: 35 | many = True 36 | if view.rel.queryable: 37 | query = view.rel_view.query_add_sorting(query) 38 | query = view.rel_view.query_add_filtering(query) 39 | query = wf.execute_stage(view.rel_view, rel_stages, 'alter_query', query) 40 | rel_objs_iterable = wf.wrapped_query_all(query) 41 | else: 42 | rel_objs_iterable = rel_objs 43 | objects_iterator = wf.loop.altered_objects_iterator( 44 | view.rel_view, rel_stages, 'alter_result', rel_objs_iterable 45 | ) 46 | offset_count = 0 47 | if 'page[offset]' in view.request.params: 48 | offset_count = sum(1 for _ in islice(objects_iterator, qinfo['page[offset]'])) 49 | res_objs = list(islice(objects_iterator, limit)) 50 | if qinfo['pj_include_count']: 51 | count = offset_count + len(res_objs) + sum(1 for _ in objects_iterator) 52 | else: 53 | many = False 54 | if view.rel.queryable: 55 | query = wf.execute_stage( 56 | view.rel_view, rel_stages, 'alter_query', query 57 | ) 58 | res_objs = [ 59 | wf.loop.get_one_altered_result_object( 60 | view.rel_view, rel_stages, query 61 | ) 62 | ] 63 | else: 64 | res_objs = [wf.ResultObject(view.rel_view, rel_objs)] 65 | if qinfo['pj_include_count']: 66 | count = 1 67 | 68 | results = wf.Results( 69 | view.rel_view, 70 | objects=res_objs, 71 | many=many, 72 | is_top=True, 73 | count=count, 74 | limit=limit 75 | ) 76 | 77 | # Fill the relationships with related objects. 78 | # Stage 'alter_result' will run on each object. 79 | for res_obj in results.objects: 80 | wf.loop.fill_result_object_related(res_obj, rel_stages) 81 | 82 | return results 83 | 84 | 85 | def workflow(view, stages): 86 | return get_results(view, stages).serialise() 87 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/relationships_delete.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from pyramid.httpexceptions import ( 5 | HTTPInternalServerError, 6 | HTTPBadRequest, 7 | HTTPForbidden, 8 | HTTPConflict, 9 | HTTPFailedDependency, 10 | ) 11 | from sqlalchemy.orm.interfaces import ( 12 | ONETOMANY, 13 | MANYTOMANY, 14 | MANYTOONE, 15 | ) 16 | from . 
import stages 17 | 18 | 19 | def workflow(view, stages): 20 | if view.rel.direction is MANYTOONE: 21 | raise HTTPForbidden('Cannot DELETE to TOONE relationship link.') 22 | obj = view.dbsession.query(view.model).get(view.obj_id) 23 | 24 | for resid in view.request.json_body['data']: 25 | if resid['type'] != view.rel_view.collection_name: 26 | raise HTTPConflict( 27 | "Resource identifier type '{}' does not match relationship type '{}'.".format( 28 | resid['type'], view.rel_view.collection_name 29 | ) 30 | ) 31 | try: 32 | item = view.dbsession.query(view.rel_class).get(resid['id']) 33 | except sqlalchemy.exc.DataError as exc: 34 | raise HTTPBadRequest("invalid id '{}'".format(resid['id'])) 35 | if item is None: 36 | raise HTTPFailedDependency("One or more objects DELETEd from this relationship do not exist.") 37 | try: 38 | getattr(obj, view.relname).remove(item) 39 | except ValueError as exc: 40 | if exc.args[0].endswith('not in list'): 41 | # The item we were asked to remove is not there. 42 | pass 43 | else: 44 | raise 45 | obj = wf.execute_stage( 46 | view, stages, 'before_write_item', obj 47 | ) 48 | try: 49 | view.dbsession.flush() 50 | except sqlalchemy.exc.IntegrityError as exc: 51 | raise HTTPFailedDependency(str(exc)) 52 | return wf.Doc() 53 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/relationships_get.py: -------------------------------------------------------------------------------- 1 | from . import stages 2 | from .related_get import ( 3 | get_results, 4 | ) 5 | 6 | 7 | # Do what reated_get does but serialise as identifiers. 8 | def workflow(view, stages): 9 | return get_results(view, stages).serialise(identifiers=True) 10 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/relationships_patch.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from pyramid.httpexceptions import ( 5 | HTTPInternalServerError, 6 | HTTPBadRequest, 7 | HTTPForbidden, 8 | HTTPConflict, 9 | HTTPFailedDependency, 10 | ) 11 | from sqlalchemy.orm.interfaces import ( 12 | ONETOMANY, 13 | MANYTOMANY, 14 | MANYTOONE, 15 | ) 16 | from . import stages 17 | from .related_get import ( 18 | get_results, 19 | ) 20 | 21 | 22 | def workflow(view, stages): 23 | obj = view.dbsession.query(view.model).get(view.obj_id) 24 | if view.rel.direction is MANYTOONE: 25 | local_col, _ = view.rel.obj.local_remote_pairs[0] 26 | resid = view.request.json_body['data'] 27 | if resid is None: 28 | setattr(obj, view.relname, None) 29 | else: 30 | if resid['type'] != view.rel_view.collection_name: 31 | raise HTTPConflict( 32 | "Resource identifier type '{}' does not match relationship type '{}'.".format( 33 | resid['type'], 34 | view.rel_view.collection_name 35 | ) 36 | ) 37 | setattr( 38 | obj, 39 | local_col.name, 40 | resid['id'] 41 | ) 42 | try: 43 | view.dbsession.flush() 44 | except sqlalchemy.exc.IntegrityError as exc: 45 | raise HTTPFailedDependency( 46 | 'Object {}/{} does not exist.'.format(resid['type'], resid['id']) 47 | ) 48 | except sqlalchemy.exc.DataError as exc: 49 | raise HTTPBadRequest("invalid id '{}'".format(resid['id'])) 50 | # Everything should be PATCHed now - return the relationship as 51 | # relationships_get would. 
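# Sketch of the request bodies handled by this TOONE branch, e.g. a
# hypothetical PATCH /posts/1/relationships/author:
#
#     {"data": {"type": "people", "id": "2"}}   # re-point the relationship
#     {"data": null}                            # clear it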
52 | return get_results(view, stages).serialise(identifiers=True) 53 | 54 | items = [] 55 | for resid in view.request.json_body['data']: 56 | if resid['type'] != view.rel_view.collection_name: 57 | raise HTTPConflict( 58 | "Resource identifier type '{}' does not match relationship type '{}'.".format( 59 | resid['type'], 60 | view.rel_view.collection_name 61 | ) 62 | ) 63 | try: 64 | newitem = view.dbsession.query(view.rel_class).get(resid['id']) 65 | except sqlalchemy.exc.DataError as exc: 66 | raise HTTPBadRequest("invalid id '{}'".format(resid['id'])) 67 | if newitem is None: 68 | raise HTTPFailedDependency("One or more objects POSTed to this relationship do not exist.") 69 | items.append(newitem) 70 | setattr(obj, view.relname, items) 71 | obj = wf.execute_stage( 72 | view, stages, 'before_write_item', obj 73 | ) 74 | try: 75 | view.dbsession.flush() 76 | except sqlalchemy.exc.IntegrityError as exc: 77 | raise HTTPFailedDependency(str(exc)) 78 | except sqlalchemy.orm.exc.FlushError as exc: 79 | if str(exc).startswith("Can't flush None value"): 80 | raise HTTPFailedDependency("One or more objects PATCHed to this relationship do not exist.") 81 | else: 82 | # Catch-all. Shouldn't reach here. 83 | raise # pragma: no cover 84 | 85 | # Everything should be PATCHed now - return the relationship as 86 | # relationships_get would. 87 | return get_results(view, stages).serialise(identifiers=True) 88 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/loop/relationships_post.py: -------------------------------------------------------------------------------- 1 | import pyramid_jsonapi.workflow as wf 2 | import sqlalchemy 3 | 4 | from pyramid.httpexceptions import ( 5 | HTTPInternalServerError, 6 | HTTPBadRequest, 7 | HTTPForbidden, 8 | HTTPConflict, 9 | HTTPFailedDependency, 10 | ) 11 | from sqlalchemy.orm.interfaces import ( 12 | ONETOMANY, 13 | MANYTOMANY, 14 | MANYTOONE, 15 | ) 16 | from . import stages 17 | from .related_get import ( 18 | get_results, 19 | ) 20 | 21 | 22 | def workflow(view, stages): 23 | if view.rel.direction is MANYTOONE: 24 | raise HTTPForbidden('Cannot POST to TOONE relationship link.') 25 | 26 | # Alter data with any callbacks 27 | data = view.request.json_body['data'] 28 | 29 | obj = view.dbsession.query(view.model).get(view.obj_id) 30 | items = [] 31 | for resid in data: 32 | if resid['type'] != view.rel_view.collection_name: 33 | raise HTTPConflict( 34 | "Resource identifier type '{}' does not match relationship type '{}'.".format( 35 | resid['type'], view.rel_view.collection_name 36 | ) 37 | ) 38 | try: 39 | newitem = view.dbsession.query(view.rel_class).get(resid['id']) 40 | except sqlalchemy.exc.DataError as exc: 41 | raise HTTPBadRequest("invalid id '{}'".format(resid['id'])) 42 | if newitem is None: 43 | raise HTTPFailedDependency("One or more objects POSTed to this relationship do not exist.") 44 | items.append(newitem) 45 | getattr(obj, view.relname).extend(items) 46 | obj = wf.execute_stage( 47 | view, stages, 'before_write_item', obj 48 | ) 49 | try: 50 | view.dbsession.flush() 51 | except sqlalchemy.exc.IntegrityError as exc: 52 | if 'duplicate key value violates unique constraint' in str(exc): 53 | # This happens when using an association proxy if we attempt to 54 | # add an object to the relationship that's already there. We 55 | # want this to be a no-op. 
56 | pass 57 | else: 58 | raise HTTPFailedDependency(str(exc)) 59 | except sqlalchemy.orm.exc.FlushError as exc: 60 | if str(exc).startswith("Can't flush None value"): 61 | raise HTTPFailedDependency("One or more objects POSTed to this relationship do not exist.") 62 | else: 63 | # Catch-all. Shouldn't reach here. 64 | raise # pragma: no cover 65 | 66 | # Everything should be done now - return the relationship as 67 | # relationships_get would. 68 | return get_results(view, stages).serialise(identifiers=True) 69 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/selectin/__init__.py: -------------------------------------------------------------------------------- 1 | stages = ( 2 | 'alter_result', 3 | ) 4 | -------------------------------------------------------------------------------- /pyramid_jsonapi/workflow/selectin/collection_get.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pyramid_jsonapi.workflow as wf 3 | import sqlalchemy 4 | import time 5 | 6 | from dataclasses import dataclass 7 | from itertools import islice 8 | from pyramid.httpexceptions import ( 9 | HTTPBadRequest, 10 | HTTPInternalServerError, 11 | ) 12 | from sqlalchemy.orm import selectinload 13 | from sqlalchemy.ext.associationproxy import ASSOCIATION_PROXY 14 | from sqlalchemy.orm.relationships import RelationshipProperty 15 | 16 | from . import stages 17 | from pyramid_jsonapi.authoriser import Authoriser 18 | from pyramid_jsonapi.db_query import RQLQuery 19 | from pyramid_jsonapi.http_query import QueryInfo, longest_includes, includes 20 | from pyramid_jsonapi.serialiser import Serialiser 21 | 22 | log = logging.getLogger(__name__) 23 | 24 | 25 | def rel_opt(rel, so_far=None): 26 | if isinstance(rel.obj, RelationshipProperty): 27 | if so_far: 28 | return so_far.selectinload(rel.instrumented) 29 | return selectinload(rel.instrumented) 30 | elif rel.obj.extension_type is ASSOCIATION_PROXY: 31 | ps = rel.obj.for_class(rel.src_class) 32 | if so_far: 33 | return so_far.selectinload(ps.local_attr).selectinload(ps.remote_attr) 34 | return selectinload(ps.local_attr).selectinload(ps.remote_attr) 35 | return None 36 | 37 | 38 | def rel_opts(view, so_far=None): 39 | options = [] 40 | for rel_name in view.requested_relationships.keys() & view.allowed_fields: 41 | rel = view.relationships[rel_name] 42 | opt = rel_opt(rel, so_far) 43 | if opt is not None: 44 | options.append(opt) 45 | return options 46 | 47 | 48 | def selectin_options(view): 49 | options = [] 50 | options.extend(rel_opts(view)) 51 | longest = longest_includes(includes(view.request)) 52 | for include in longest: 53 | cur_view = view 54 | so_far = None 55 | for rel_name in include: 56 | rel = cur_view.relationships[rel_name] 57 | so_far = rel_opt(rel, so_far) 58 | rel_view = cur_view.view_instance(rel.tgt_class) 59 | options.extend(rel_opts(rel_view, so_far)) 60 | cur_view = rel_view 61 | return options 62 | 63 | 64 | def workflow(view, stages): 65 | wf_start = time.time() 66 | log.debug(f'{wf_start} start selectin workflow') 67 | # qinfo = view.query_info 68 | qinfo = QueryInfo(view.__class__, view.request) 69 | pinfo = qinfo.paging_info 70 | count = None 71 | 72 | # query = view.base_collection_query() 73 | query = RQLQuery.from_view(view, loadonly=None) 74 | query = view.query_add_sorting(query, reversed=pinfo.needs_reversed) 75 | query = view.query_add_filtering(query) 76 | if pinfo.is_relative: 77 | query = query.add_relative_paging() 
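# Illustration of the loader options applied just below: selectin_options()
# turns the requested relationships and any ?include= paths into chained
# selectinload() options, e.g. for a hypothetical GET /people?include=posts.comments
# roughly
#
#     query = query.options(selectinload(Person.posts).selectinload(Post.comments))
#
# so related rows are fetched with batched SELECT ... WHERE id IN (...) queries
# rather than one query per parent object.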
78 | 79 | query = query.options(*selectin_options(view)) 80 | 81 | items_iterator = query.iterate_paged(pinfo.limit) 82 | before_items = time.time() 83 | authoriser = Authoriser(view) 84 | if pinfo.start_type == 'offset' and pinfo.offset > 0: 85 | authz_items_no_record = authoriser.iterate_authorised_items(items_iterator, errors=None) 86 | next(islice(authz_items_no_record, pinfo.offset, pinfo.offset), None) 87 | errors = {'objects': {}, 'attributes': {}, 'relationships': {}} 88 | authz_items = authoriser.iterate_authorised_items(items_iterator, errors) 89 | items = list(islice(authz_items, pinfo.limit)) 90 | log.debug(f'items fetched in {time.time() - before_items}') 91 | if pinfo.needs_reversed: 92 | items.reverse() 93 | 94 | if qinfo.pj_include_count: 95 | count = RQLQuery.from_view(view).id_only().add_filtering().pj_count() 96 | before_serialise = time.time() 97 | doc = Serialiser(view, authoriser).serialise(items, pinfo.limit, available=count, errors=errors) 98 | log.debug(f'items serialised in {time.time() - before_serialise}') 99 | return doc 100 | 101 | 102 | def full_search_count(view, stages): 103 | # Same as normal query but only id column and don't bother with sorting. 104 | query = view.base_collection_query(loadonly=[view.key_column.name]) 105 | query = view.query_add_filtering(query) 106 | query = wf.execute_stage( 107 | view, stages, 'alter_query', query 108 | ) 109 | objects_iterator = wf.loop.altered_objects_iterator( 110 | view, stages, 'alter_result', wf.wrapped_query_all(query) 111 | ) 112 | return sum(1 for _ in objects_iterator) 113 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from setuptools import setup, find_packages 3 | # Append project to sys.path so that we can import version 'directly'. 4 | # Importing as 'from pyramid_jsonapi import version' needs the deps we 5 | # haven't installed yet! 
6 | sys.path.append("pyramid_jsonapi") 7 | from version import get_version 8 | 9 | requires = [ 10 | 'alchemyjsonschema', 11 | 'cachetools', 12 | 'jsonschema', 13 | 'pkginfo', 14 | 'pyramid', 15 | 'pyramid_mako', 16 | 'pyramid_settings_wrapper', 17 | 'pyyaml>=5.1', # openapi-spec-validator requires >= 5.1 18 | 'rqlalchemy', 19 | 'SQLAlchemy', 20 | ] 21 | 22 | setup( 23 | name = 'pyramid_jsonapi', 24 | packages = find_packages(), 25 | install_requires=requires, 26 | version=get_version(), 27 | description = 'Auto-build JSON API from sqlalchemy models using the pyramid framework', 28 | author = 'Colin Higgs', 29 | author_email = 'colin.higgs70@gmail.com', 30 | license = 'GNU Affero General Public License v3 or later (AGPLv3+)', 31 | url = 'https://github.com/colinhiggs/pyramid-jsonapi', 32 | keywords = ['json', 'api', 'json-api', 'jsonapi', 'jsonschema', 'openapi', 'pyramid', 'sqlalchemy'], 33 | classifiers = [ 34 | 'Development Status :: 5 - Production/Stable', 35 | 'Framework :: Pyramid', 36 | 'Intended Audience :: Developers', 37 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', 38 | 'Programming Language :: Python :: 3', 39 | 'Programming Language :: Python :: 3.4', 40 | 'Programming Language :: Python :: 3.5', 41 | 'Programming Language :: Python :: 3.6', 42 | 'Topic :: Internet :: WWW/HTTP', 43 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 44 | 'Topic :: Software Development :: Libraries :: Application Frameworks', 45 | 'Topic :: Software Development :: Libraries :: Python Modules', 46 | ], 47 | package_data={'': ['schema/*.json', 48 | 'metadata/OpenAPI/swagger-ui/*.mako']} 49 | ) 50 | -------------------------------------------------------------------------------- /test_project/CHANGES.txt: -------------------------------------------------------------------------------- 1 | 0.0 2 | --- 3 | 4 | - Initial version 5 | -------------------------------------------------------------------------------- /test_project/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.txt *.ini *.cfg *.rst 2 | recursive-include test_project *.ico *.png *.css *.gif *.jpg *.pt *.txt *.mak *.mako *.js *.html *.xml 3 | -------------------------------------------------------------------------------- /test_project/README.txt: -------------------------------------------------------------------------------- 1 | test_project README 2 | ================== 3 | 4 | Getting Started 5 | --------------- 6 | 7 | - cd 8 | 9 | - $venv/bin/python setup.py develop 10 | 11 | - $venv/bin/initialize_test_project_db development.ini 12 | 13 | - $venv/bin/pserve development.ini 14 | 15 | -------------------------------------------------------------------------------- /test_project/development.ini: -------------------------------------------------------------------------------- 1 | ### 2 | # app configuration 3 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html 4 | ### 5 | 6 | [app:main] 7 | use = egg:test_project 8 | 9 | pyramid.reload_templates = true 10 | pyramid.debug_authorization = false 11 | pyramid.debug_notfound = false 12 | pyramid.debug_routematch = false 13 | pyramid.default_locale_name = en 14 | pyramid.includes = 15 | pyramid_debugtoolbar 16 | pyramid_tm 17 | 18 | sqlalchemy.url = postgresql://test:test@127.0.0.1:5432/test 19 | 20 | pyramid_jsonapi.debug_meta = false 21 | pyramid_jsonapi.debug_endpoints = true 22 | pyramid_jsonapi.debug_test_data_module = test_project.test_data 23 | 
pyramid_jsonapi.debug_traceback = true 24 | 25 | pyramid_jsonapi.route_name_prefix = pyramid_jsonapi 26 | pyramid_jsonapi.route_pattern_prefix = 27 | pyramid_jsonapi.paging_default_limit = 10 28 | pyramid_jsonapi.paging_max_limit = 100 29 | pyramid_jsonapi.allow_client_ids = true 30 | pyramid_jsonapi.transaction_isolation_level = SERIALIZABLE 31 | pyramid_jsonapi.load_strategy = loop 32 | pyramid_jsonapi.save_strategy = loop 33 | pj_wf_modules = pyramid_jsonapi.workflow 34 | pj_wf_load_modules = %(pj_wf_modules)s.%(pyramid_jsonapi.load_strategy)s 35 | pj_wf_save_modules = %(pj_wf_modules)s.%(pyramid_jsonapi.save_strategy)s 36 | pyramid_jsonapi.workflow_get = %(pj_wf_load_modules)s.get 37 | pyramid_jsonapi.workflow_patch = %(pj_wf_save_modules)s.patch 38 | pyramid_jsonapi.workflow_delete = %(pj_wf_save_modules)s.delete 39 | pyramid_jsonapi.workflow_collection_get = %(pj_wf_load_modules)s.collection_get 40 | pyramid_jsonapi.workflow_collection_post = %(pj_wf_save_modules)s.collection_post 41 | pyramid_jsonapi.workflow_related_get = %(pj_wf_load_modules)s.related_get 42 | pyramid_jsonapi.workflow_relationships_get = %(pj_wf_load_modules)s.relationships_get 43 | pyramid_jsonapi.workflow_relationships_post = %(pj_wf_save_modules)s.relationships_post 44 | pyramid_jsonapi.workflow_relationships_patch = %(pj_wf_save_modules)s.relationships_patch 45 | pyramid_jsonapi.workflow_relationships_delete = %(pj_wf_save_modules)s.relationships_delete 46 | 47 | 48 | # By default, the toolbar only appears for clients from IP addresses 49 | # '127.0.0.1' and '::1'. 50 | # debugtoolbar.hosts = 127.0.0.1 ::1 51 | 52 | ### 53 | # wsgi server configuration 54 | ### 55 | 56 | [server:main] 57 | use = egg:waitress#main 58 | host = 0.0.0.0 59 | port = 6544 60 | 61 | ### 62 | # logging configuration 63 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html 64 | ### 65 | 66 | [loggers] 67 | keys = root, test_project, sqlalchemy 68 | 69 | [handlers] 70 | keys = console 71 | 72 | [formatters] 73 | keys = generic 74 | 75 | [logger_root] 76 | level = INFO 77 | handlers = console 78 | 79 | [logger_test_project] 80 | level = DEBUG 81 | handlers = 82 | qualname = test_project 83 | 84 | [logger_sqlalchemy] 85 | level = INFO 86 | handlers = 87 | qualname = sqlalchemy.engine 88 | # "level = INFO" logs SQL queries. 89 | # "level = DEBUG" logs SQL queries and results. 90 | # "level = WARN" logs neither. (Recommended for production systems.) 
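# Note on the pyramid_jsonapi.workflow_* settings in [app:main] above: with
# load_strategy = loop, the %()s interpolation resolves to, e.g.,
#   pyramid_jsonapi.workflow_collection_get = pyramid_jsonapi.workflow.loop.collection_get
# i.e. that module handles GET requests on collections.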
91 | 92 | [handler_console] 93 | class = StreamHandler 94 | args = (sys.stderr,) 95 | level = NOTSET 96 | formatter = generic 97 | 98 | [formatter_generic] 99 | format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s 100 | 101 | [pshell] 102 | m = test_project.models 103 | ses = test_project.models.DBSession 104 | t = transaction 105 | po = test_project.play_objects 106 | -------------------------------------------------------------------------------- /test_project/production.ini: -------------------------------------------------------------------------------- 1 | ### 2 | # app configuration 3 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html 4 | ### 5 | 6 | [app:main] 7 | use = egg:test_project 8 | 9 | pyramid.reload_templates = false 10 | pyramid.debug_authorization = false 11 | pyramid.debug_notfound = false 12 | pyramid.debug_routematch = false 13 | pyramid.default_locale_name = en 14 | pyramid.includes = 15 | pyramid_tm 16 | 17 | sqlalchemy.url = postgresql:///jsonapi_test 18 | 19 | [server:main] 20 | use = egg:waitress#main 21 | host = 0.0.0.0 22 | port = 6543 23 | 24 | ### 25 | # logging configuration 26 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html 27 | ### 28 | 29 | [loggers] 30 | keys = root, test_project, sqlalchemy 31 | 32 | [handlers] 33 | keys = console 34 | 35 | [formatters] 36 | keys = generic 37 | 38 | [logger_root] 39 | level = WARN 40 | handlers = console 41 | 42 | [logger_test_project] 43 | level = WARN 44 | handlers = 45 | qualname = test_project 46 | 47 | [logger_sqlalchemy] 48 | level = WARN 49 | handlers = 50 | qualname = sqlalchemy.engine 51 | # "level = INFO" logs SQL queries. 52 | # "level = DEBUG" logs SQL queries and results. 53 | # "level = WARN" logs neither. (Recommended for production systems.) 
54 | 55 | [handler_console] 56 | class = StreamHandler 57 | args = (sys.stderr,) 58 | level = NOTSET 59 | formatter = generic 60 | 61 | [formatter_generic] 62 | format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s 63 | -------------------------------------------------------------------------------- /test_project/run-app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import testing.postgresql 3 | 4 | project_name = "test_project" 5 | db_dir = "{}_db".format(project_name) 6 | db_port = 54323 7 | 8 | make_db = True 9 | try: 10 | os.mkdir(db_dir) 11 | except OSError: 12 | make_db = False 13 | 14 | # Launch new PostgreSQL server 15 | print("Setting up postgres DB...") 16 | with testing.postgresql.Postgresql(name=project_name, port=db_port, base_dir=db_dir) as postgresql: 17 | # connect to PostgreSQL 18 | print(postgresql.url(), db_dir) 19 | if make_db: 20 | # Enable plugin for uuid generation 21 | os.system("""psql -d {} -c 'CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'""".format(postgresql.url())) 22 | print("Initializing db") 23 | os.system("bin/python bin/initialize_{0}_db {0}/development.ini".format(project_name)) 24 | else: 25 | print("Re-using existing DB.") 26 | print("Starting gunicorn") 27 | os.system("bin/python bin/gunicorn --reload --paste {}/development.ini --capture-output".format(project_name)) 28 | -------------------------------------------------------------------------------- /test_project/setup.cfg: -------------------------------------------------------------------------------- 1 | [nosetests] 2 | match=^test 3 | nocapture=1 4 | cover-package=test_project 5 | with-coverage=1 6 | cover-erase=1 7 | 8 | [compile_catalog] 9 | directory = test_project/locale 10 | domain = test_project 11 | statistics = true 12 | 13 | [extract_messages] 14 | add_comments = TRANSLATORS: 15 | output_file = test_project/locale/test_project.pot 16 | width = 80 17 | 18 | [init_catalog] 19 | domain = test_project 20 | input_file = test_project/locale/test_project.pot 21 | output_dir = test_project/locale 22 | 23 | [update_catalog] 24 | domain = test_project 25 | input_file = test_project/locale/test_project.pot 26 | output_dir = test_project/locale 27 | previous = true 28 | -------------------------------------------------------------------------------- /test_project/setup.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | import re 4 | 5 | from setuptools import setup, find_packages 6 | 7 | here = os.path.abspath(os.path.dirname(__file__)) 8 | README = open(os.path.join(here, 'README.txt')).read() 9 | CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() 10 | 11 | 12 | def local_ltree_pkg(): 13 | list_of_files = glob.glob('/home/chiggs1/git/ltree_models/dist/*.tar.gz') 14 | return max(list_of_files, key=os.path.getctime) 15 | 16 | def ltree_version(path): 17 | fname = os.path.basename(path) 18 | match = re.search(r'ltree_models-(.*)\.tar\.gz', fname) 19 | return match.group(1) 20 | 21 | requires = [ 22 | # f'ltree @ file://localhost{local_ltree_pkg()}', 23 | 'ltree_models', 24 | 'openapi_spec_validator', 25 | 'psycopg2-binary', 26 | 'pyramid', 27 | 'pyramid_debugtoolbar', 28 | 'pyramid_jsonapi', 29 | 'pyramid_tm', 30 | 'SQLAlchemy', 31 | 'testing.postgresql', 32 | 'transaction', 33 | 'waitress', 34 | 'webtest', 35 | 'zope.sqlalchemy', 36 | 'parameterized', 37 | ] 38 | 39 | setup(name='test_project', 40 | version='1.0', 41 | description='test_project', 42 
| long_description=README + '\n\n' + CHANGES, 43 | classifiers=[ 44 | "Programming Language :: Python", 45 | "Framework :: Pyramid", 46 | "Topic :: Internet :: WWW/HTTP", 47 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", 48 | ], 49 | author='', 50 | author_email='', 51 | url='', 52 | keywords='web wsgi bfg pylons pyramid', 53 | packages=find_packages(), 54 | include_package_data=True, 55 | zip_safe=False, 56 | test_suite='test_project', 57 | install_requires=requires, 58 | entry_points="""\ 59 | [paste.app_factory] 60 | main = test_project:main 61 | [console_scripts] 62 | initialize_test_project_db = test_project.scripts.initializedb:main 63 | """, 64 | ) 65 | -------------------------------------------------------------------------------- /test_project/test_project/__init__.py: -------------------------------------------------------------------------------- 1 | from pyramid.config import Configurator 2 | from sqlalchemy import engine_from_config 3 | from pyramid.renderers import JSON 4 | from . import views 5 | 6 | # The jsonapi module. 7 | import pyramid_jsonapi 8 | import pyramid_jsonapi.workflow as wf 9 | 10 | # Import models as a module: needed for create_jsonapi... 11 | from . import models 12 | from . import models2 13 | 14 | from pyramid.httpexceptions import ( 15 | HTTPForbidden 16 | ) 17 | 18 | test_settings = { 19 | 'models_iterable': { 20 | 'module': models, 21 | 'list': [models.Blog, models.Person, models.Post], 22 | 'composite_key': [models2.CompositeKey] 23 | } 24 | } 25 | 26 | # Used to test that adding JSON adapters works. 27 | import datetime 28 | def datetime_adapter(obj, request): 29 | return obj.isoformat() 30 | 31 | # Make sure the schema generator understands some types from sqlalchemy_utils. 32 | import sqlalchemy_utils 33 | import alchemyjsonschema 34 | alchemyjsonschema.default_column_to_schema.update( 35 | { 36 | sqlalchemy_utils.LtreeType: "string" 37 | } 38 | ) 39 | 40 | 41 | def main(global_config, **settings): 42 | """ This function returns a Pyramid WSGI application. 43 | """ 44 | # The usual stuff from the pyramid alchemy scaffold. 45 | engine = engine_from_config(settings, 'sqlalchemy.') 46 | models.DBSession.configure(bind=engine) 47 | models.Base.metadata.bind = engine 48 | config = Configurator(settings=settings) 49 | config.add_static_view('static', 'static', cache_max_age=3600) 50 | config.add_route('home', '/') 51 | config.add_route('echo', '/echo/{type}') 52 | config.scan(views) 53 | 54 | # Set up the renderer. 55 | renderer = JSON() 56 | renderer.add_adapter(datetime.date, datetime_adapter) 57 | config.add_renderer('json', renderer) 58 | 59 | # Lines specific to pyramid_jsonapi. 60 | # Create an API instance. 61 | pj = pyramid_jsonapi.PyramidJSONAPI( 62 | config, 63 | test_settings['models_iterable'][ 64 | settings.get('pyramid_jsonapi_tests.models_iterable', 'module') 65 | ], 66 | lambda view: models.DBSession 67 | ) 68 | # Register a bad filter operator for test purposes. 69 | pj.filter_registry.register('bad_op') 70 | # Create the routes and views automagically. 71 | pj.create_jsonapi_using_magic_and_pixie_dust() 72 | 73 | person_view = pj.view_classes[models.Person] 74 | blogs_view = pj.view_classes[models.Blog] 75 | def sh_add_some_info(doc, view, stage, view_method): 76 | doc['meta']['added'] = 'some random info' 77 | return doc 78 | 79 | # Add some random information via the alter_document stage. 
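# Sketch of the effect, assuming a hypothetical GET /people/1 once
# sh_add_some_info (defined above) is appended to item_get's 'alter_document'
# stage below: the serialised document gains
#
#     {"data": {...}, "meta": {"added": "some random info", ...}}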
80 | person_view.item_get.stages['alter_document'].append(sh_add_some_info) 81 | 82 | # Apply GET permission handlers at the alter_direct_results and 83 | # alter_related_results stages. 84 | # pj.enable_permission_handlers('get', ['alter_direct_results', 'alter_related_results']) 85 | 86 | # Add permission filters to do the logic of accepting or rejecting items. 87 | # person_view.register_permission_filter( 88 | # ['get'], 89 | # ['alter_direct_results', 'alter_related_results'], 90 | # lambda obj, *args, **kwargs: obj.object.name != 'alice', 91 | # ) 92 | # blogs_view.register_permission_filter( 93 | # ['get'], 94 | # ['alter_direct_results', 'alter_related_results'], 95 | # lambda obj, *args, **kwargs: obj.object.id != 3, 96 | # ) 97 | 98 | # Back to the usual pyramid stuff. 99 | app = config.make_wsgi_app() 100 | app.pj = pj 101 | return app 102 | -------------------------------------------------------------------------------- /test_project/test_project/models.py: -------------------------------------------------------------------------------- 1 | from ltree_models import ( 2 | LtreeMixin, 3 | ) 4 | from sqlalchemy import ( 5 | Table, 6 | Column, 7 | Index, 8 | Integer, 9 | Text, 10 | BigInteger, 11 | DateTime, 12 | ForeignKey, 13 | UniqueConstraint, 14 | CheckConstraint, 15 | func, 16 | select, 17 | ) 18 | from sqlalchemy.dialects.postgresql import JSONB 19 | from sqlalchemy.ext.associationproxy import association_proxy 20 | from sqlalchemy.ext.declarative import declarative_base 21 | from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method 22 | from sqlalchemy.orm import ( 23 | scoped_session, 24 | sessionmaker, 25 | relationship, 26 | backref, 27 | foreign, 28 | remote, 29 | ) 30 | from sqlalchemy.orm.interfaces import ( 31 | ONETOMANY, 32 | MANYTOMANY, 33 | MANYTOONE, 34 | ) 35 | from zope.sqlalchemy import register 36 | 37 | DBSession = scoped_session(sessionmaker()) 38 | register(DBSession) 39 | Base = declarative_base() 40 | 41 | IdType = BigInteger 42 | def IdColumn(): 43 | '''Convenience function: the default Column for object ids.''' 44 | return Column(IdType, primary_key=True, autoincrement=True) 45 | def IdRefColumn(reference, *args, **kwargs): 46 | '''Convenience function: the default Column for references to object ids.''' 47 | return Column(IdType, ForeignKey(reference), *args, **kwargs) 48 | 49 | authors_articles_assoc = Table( 50 | 'authors_articles_assoc', 51 | Base.metadata, 52 | IdRefColumn('people.id', name='author_id', primary_key=True), 53 | IdRefColumn('articles_by_assoc.articles_by_assoc_id', name='article_id', 54 | primary_key=True) 55 | ) 56 | 57 | class Person(Base): 58 | __tablename__ = 'people' 59 | id = IdColumn() 60 | name = Column(Text) 61 | age = Column(Integer) 62 | invisible = Column(Text) 63 | @hybrid_property 64 | def invisible_hybrid(self): 65 | return 'boo!' 66 | 67 | blogs = relationship('Blog', backref='owner') 68 | posts = relationship('Post', backref='author') 69 | comments = relationship('Comment', backref='author') 70 | invisible_comments = relationship('Comment') 71 | articles_by_assoc = relationship( 72 | "ArticleByAssoc", 73 | secondary=authors_articles_assoc, 74 | backref="authors" 75 | ) 76 | article_associations = relationship( 77 | 'ArticleAuthorAssociation', 78 | cascade='all, delete-orphan', 79 | backref='author' 80 | ) 81 | articles_by_proxy = association_proxy('article_associations', 'article') 82 | # A relationship that doesn't join along the usual fk -> pk lines. 
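# The relationship below joins on an arbitrary SQL expression instead of a
# foreign key: remote()/foreign() mark which side each column belongs to, and
# the LIKE pattern means, e.g., a Person named 'alice' is linked to every Blog
# whose title ends in 'alice' (title LIKE '%alice').  It is viewonly because a
# join like this cannot be written back.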
83 | blogs_from_titles = relationship( 84 | 'Blog', 85 | primaryjoin="remote(Blog.title).like('%' + foreign(Person.name))", 86 | viewonly=True, 87 | uselist=True, 88 | ) 89 | 90 | 91 | # make invisible columns invisible to API 92 | invisible.info.update({'pyramid_jsonapi': {'visible': False}}) 93 | invisible_hybrid.info.update({'pyramid_jsonapi': {'visible': False}}) 94 | invisible_comments.info.update({'pyramid_jsonapi': {'visible': False}}) 95 | 96 | 97 | class Blog(Base): 98 | __tablename__ = 'blogs' 99 | __table_args__ = ( 100 | CheckConstraint('owner_id != 3'), 101 | CheckConstraint("title != 'forbidden title'") 102 | ) 103 | id = IdColumn() 104 | title = Column(Text) 105 | owner_id = IdRefColumn('people.id') 106 | # A read only hybrid property 107 | @hybrid_property 108 | def owner_name(self): 109 | try: 110 | return self.owner.name 111 | except AttributeError: 112 | # No owner 113 | return None 114 | 115 | posts = relationship('Post', backref='blog') 116 | # Using a hybrid property as a ONETOMANY relationship. 117 | @hybrid_property 118 | def posts_authors(self): 119 | # Return the authors of all of the posts (as objects, like a relationship) 120 | authors = set() 121 | for post in self.posts: 122 | authors.add(post.author) 123 | return list(authors) 124 | posts_authors.info['pyramid_jsonapi'] = { 125 | 'relationship': { 126 | 'direction': ONETOMANY, 127 | 'queryable': False, 128 | 'tgt_class': 'Person', 129 | } 130 | } 131 | 132 | 133 | class Post(Base): 134 | __tablename__ = 'posts' 135 | id = IdColumn() 136 | title = Column(Text) 137 | content = Column(Text) 138 | published_at = Column(DateTime, nullable=False, server_default=func.now()) 139 | json_content = Column(JSONB) 140 | blog_id = IdRefColumn('blogs.id') 141 | author_id = IdRefColumn('people.id', nullable=False) 142 | # A read-write hybrid property 143 | @hybrid_property 144 | def author_name(self): 145 | author_name = None 146 | try: 147 | author_name = self.author.name 148 | except AttributeError: 149 | # No author 150 | pass 151 | return author_name 152 | @author_name.setter 153 | def author_name(self, name): 154 | self.author.name = name 155 | 156 | comments = relationship('Comment', backref = 'post') 157 | # Using a hybrid property as a MANYTOONE relationship. 
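# As with Blog.posts_authors above, the info dict below advertises a plain
# hybrid property to pyramid_jsonapi as a relationship.  'queryable': False
# means the loop workflow reads the related object by attribute access
# (roughly getattr(post, 'blog_owner') -> a Person) instead of building a
# query; 'tgt_class' may be given as the class itself or as its name.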
158 | @hybrid_property 159 | def blog_owner(self): 160 | # Return the owner of the blog this post is in (as an object, like a 161 | # relationship) 162 | return self.blog.owner 163 | blog_owner.info['pyramid_jsonapi'] = { 164 | 'relationship': { 165 | 'direction': MANYTOONE, 166 | 'queryable': False, 167 | 'tgt_class': Person, 168 | } 169 | } 170 | 171 | 172 | class Comment(Base): 173 | __tablename__ = 'comments' 174 | comments_id = IdColumn() 175 | content = Column(Text) 176 | author_id = IdRefColumn('people.id') 177 | post_id = IdRefColumn('posts.id') 178 | type = Column(Text) 179 | 180 | __mapper_args__ = { 181 | 'polymorphic_identity': 'comments', 182 | 'polymorphic_on': 'type' 183 | } 184 | 185 | 186 | class BenignComment(Comment): 187 | __tablename__ = 'benign_comments' 188 | comments_id = IdRefColumn( 189 | 'comments.comments_id', 190 | primary_key=True 191 | ) 192 | fawning_text = Column(Text) 193 | __mapper_args__ = { 194 | 'polymorphic_identity': 'benign_comments' 195 | } 196 | 197 | 198 | class VitriolicComment(Comment): 199 | __tablename__ = 'vitriolic_comments' 200 | comments_id = IdRefColumn( 201 | 'comments.comments_id', 202 | primary_key=True 203 | ) 204 | scathing_text = Column(Text) 205 | __mapper_args__ = { 206 | 'polymorphic_identity': 'vitriolic_comments' 207 | } 208 | 209 | 210 | class ArticleByAssoc(Base): 211 | __tablename__ = 'articles_by_assoc' 212 | articles_by_assoc_id = IdColumn() 213 | title = Column(Text, nullable=False) 214 | content = Column(Text) 215 | published_at = Column(DateTime) 216 | 217 | 218 | class ArticleByObj(Base): 219 | __tablename__ = 'articles_by_obj' 220 | articles_by_obj_id = IdColumn() 221 | title = Column(Text, nullable=False) 222 | content = Column(Text) 223 | published_at = Column(DateTime) 224 | author_associations = relationship( 225 | 'ArticleAuthorAssociation', 226 | cascade='all, delete-orphan', 227 | backref='article' 228 | ) 229 | authors_by_proxy = association_proxy('author_associations', 'author') 230 | 231 | 232 | class ArticleAuthorAssociation(Base): 233 | __tablename__ = 'article_author_associations' 234 | article_author_associations_id = IdColumn() 235 | article_id = IdRefColumn( 236 | 'articles_by_obj.articles_by_obj_id', 237 | # nullable=False 238 | ) 239 | author_id = IdRefColumn( 240 | 'people.id', 241 | # nullable=False 242 | ) 243 | date_joined = Column(DateTime, server_default=func.now()) 244 | 245 | # __table_args__ = ( 246 | # UniqueConstraint('article_id', 'author_id'), 247 | # ) 248 | 249 | def __init__( 250 | self, article=None, author=None, date_joined=None, 251 | article_author_associations_id=None, 252 | article_id=None, 253 | author_id=None 254 | ): 255 | if article is not None: 256 | self.article = article 257 | if author is not None: 258 | self.author = author 259 | self.date_joined = date_joined 260 | if self.date_joined is None: 261 | self.date_joined = func.now() 262 | if article_author_associations_id is not None: 263 | self.article_author_associations_id = article_author_associations_id 264 | if article_id is not None: 265 | self.article_id = article_id 266 | if author_id is not None: 267 | self.author_id = author_id 268 | 269 | 270 | class RenamedThings(Base): 271 | __tablename__ = 'things' 272 | id = IdColumn() 273 | stuff = Column(Text) 274 | __pyramid_jsonapi__ = { 275 | 'collection_name': 'whatsits' 276 | } 277 | 278 | 279 | class TreeNode(Base): 280 | __tablename__ = 'treenodes' 281 | id = IdColumn() 282 | name = Column(Text) 283 | parent_id = IdRefColumn('treenodes.id') 284 | children = 
relationship("TreeNode", 285 | backref=backref('parent', remote_side=[id]) 286 | ) 287 | 288 | 289 | class PersonView(Base): 290 | __table__ = select(Person).subquery() 291 | 292 | posts = relationship('Post', backref='view_author') 293 | 294 | __pyramid_jsonapi__ = { 295 | 'collection_name': 'view_people', 296 | } 297 | 298 | 299 | class LtreeNode(Base, LtreeMixin): 300 | __tablename__ = 'ltree_nodes' 301 | 302 | id = IdColumn() 303 | 304 | 305 | # association object for many to many management model. 306 | class ManagerAssociation(Base): 307 | __tablename__ = 'managers' 308 | 309 | id = IdColumn() 310 | boss_id = IdRefColumn('jobs.id') 311 | minion_id = IdRefColumn('jobs.id') 312 | 313 | # relationships from backrefs: 314 | # boss (other end of Job.ao_minions) 315 | # minion (other end of Job.ao_bosses) 316 | 317 | 318 | class Job(Base): 319 | __tablename__ = 'jobs' 320 | 321 | id = IdColumn() 322 | title = Column(Text) 323 | 324 | minions = association_proxy('ao_minions', 'minion') 325 | ao_minions = relationship( 326 | "ManagerAssociation", 327 | foreign_keys=[ManagerAssociation.boss_id], 328 | backref=backref("boss") 329 | ) 330 | 331 | bosses = association_proxy('ao_bosses', 'boss') 332 | ao_bosses = relationship( 333 | "ManagerAssociation", 334 | foreign_keys=[ManagerAssociation.minion_id], 335 | backref=backref("minion") 336 | ) 337 | -------------------------------------------------------------------------------- /test_project/test_project/models2.py: -------------------------------------------------------------------------------- 1 | '''Quick and dirty alternative models file for testing purposes.''' 2 | 3 | from sqlalchemy import ( 4 | Column, 5 | Index, 6 | Integer, 7 | Text, 8 | BigInteger, 9 | DateTime, 10 | ForeignKey, 11 | UniqueConstraint, 12 | ) 13 | 14 | from sqlalchemy.ext.declarative import declarative_base 15 | 16 | Base = declarative_base() 17 | 18 | IdType = BigInteger 19 | def IdColumn(): 20 | '''Convenience function: the default Column for object ids.''' 21 | return Column(IdType, primary_key=True, autoincrement=True) 22 | def IdRefColumn(reference, *args, **kwargs): 23 | '''Convenience function: the default Column for references to object ids.''' 24 | return Column(IdType, ForeignKey(reference), *args, **kwargs) 25 | 26 | class CompositeKey(Base): 27 | __tablename__ = 'people2' 28 | primary_one = IdColumn() 29 | primary_two = IdColumn() 30 | other_column = Column(Text) 31 | -------------------------------------------------------------------------------- /test_project/test_project/play_objects.py: -------------------------------------------------------------------------------- 1 | from . 
import models 2 | s = models.DBSession 3 | 4 | class DBObjects(): 5 | 6 | def __init__(self): 7 | self.alice = s.query(models.Person).get(1) 8 | self.bob = s.query(models.Person).get(2) 9 | -------------------------------------------------------------------------------- /test_project/test_project/query_tests.py: -------------------------------------------------------------------------------- 1 | import ltree_models 2 | import sqlalchemy 3 | from sqlalchemy import ( 4 | create_engine, 5 | ) 6 | from sqlalchemy.orm import ( 7 | aliased, 8 | ) 9 | import testing.postgresql 10 | from test_project import ( 11 | test_data 12 | ) 13 | from test_project.models import ( 14 | DBSession, 15 | ArticleAuthorAssociation, 16 | ArticleByAssoc, 17 | ArticleByObj, 18 | Base, 19 | Blog, 20 | Person, 21 | LtreeNode, 22 | TreeNode, 23 | ) 24 | import transaction 25 | import unittest 26 | 27 | def setUpModule(): 28 | '''Create a test DB and import data.''' 29 | # Create a new database somewhere in /tmp 30 | global postgresql 31 | global engine 32 | postgresql = testing.postgresql.Postgresql(port=7654) 33 | engine = create_engine(postgresql.url()) 34 | ltree_models.add_ltree_extension(engine) 35 | DBSession.configure(bind=engine) 36 | 37 | 38 | def tearDownModule(): 39 | '''Throw away test DB.''' 40 | global postgresql 41 | DBSession.close() 42 | postgresql.stop() 43 | 44 | 45 | class DBTestBase(unittest.TestCase): 46 | 47 | def setUp(self): 48 | Base.metadata.create_all(engine) 49 | # Add some basic test data. 50 | test_data.add_to_db(engine) 51 | transaction.begin() 52 | 53 | def tearDown(self): 54 | transaction.abort() 55 | Base.metadata.drop_all(engine) 56 | 57 | 58 | class IllustrateRelatedQueries(DBTestBase): 59 | 60 | def test_fk_one_to_many(self): 61 | query = DBSession.query(Blog).select_from(Person).join( 62 | Person.blogs 63 | ).filter( 64 | Person.id == '1' 65 | ) 66 | alice = DBSession.query(Person).get('1') 67 | self.assertEqual(query.all(), alice.blogs) 68 | 69 | def test_fk_many_to_one(self): 70 | query = DBSession.query(Person).select_from(Blog).join( 71 | Blog.owner 72 | ).filter( 73 | Blog.id == '1' 74 | ) 75 | self.assertEqual(query.one(), DBSession.query(Person).get('1')) 76 | 77 | def test_fk_many_to_many_assoc_table(self): 78 | query = DBSession.query(ArticleByAssoc).select_from(Person).join( 79 | Person.articles_by_assoc 80 | ).filter( 81 | Person.id == '11' 82 | ) 83 | person11 = DBSession.query(Person).get('11') 84 | self.assertEqual(query.all(), person11.articles_by_assoc) 85 | query = DBSession.query(ArticleByAssoc).select_from(Person).join( 86 | Person.articles_by_assoc 87 | ).filter( 88 | Person.id == '12' 89 | ) 90 | person12 = DBSession.query(Person).get('12') 91 | self.assertEqual(query.all(), person12.articles_by_assoc) 92 | 93 | def test_fk_many_to_many_assoc_proxy(self): 94 | rel = sqlalchemy.inspect(Person).all_orm_descriptors['articles_by_proxy'] 95 | proxy = rel.for_class(Person) 96 | # print(proxy.local_attr) 97 | # print(proxy.remote_attr) 98 | query = DBSession.query(ArticleByObj).select_from(Person).join( 99 | # Person.article_associations 100 | proxy.local_attr 101 | ).join( 102 | # ArticleAuthorAssociation.article 103 | proxy.remote_attr 104 | ).filter( 105 | Person.id == '12' 106 | ) 107 | person12 = DBSession.query(Person).get('12') 108 | self.assertEqual( 109 | [aa.article for aa in person12.article_associations], 110 | query.all() 111 | ) 112 | 113 | def test_fk_self_one_to_many(self): 114 | tn2 = aliased(TreeNode) 115 | query = 
DBSession.query(TreeNode).select_from(tn2).join( 116 | tn2.children 117 | ).filter( 118 | tn2.id == '1' 119 | ) 120 | root = DBSession.query(TreeNode).get('1') 121 | self.assertEqual(query.all(), root.children) 122 | 123 | def test_fk_self_many_to_one(self): 124 | tn2 = aliased(TreeNode) 125 | query = DBSession.query(TreeNode).select_from(tn2).join( 126 | tn2.parent 127 | ).filter( 128 | tn2.id == '2' 129 | ) 130 | child = DBSession.query(TreeNode).get('2') 131 | self.assertEqual(query.one(), child.parent) 132 | 133 | def test_join_condition_one_to_many(self): 134 | query = DBSession.query(Blog).select_from(Person).join( 135 | Person.blogs_from_titles 136 | ).filter( 137 | Person.id == '1' 138 | ) 139 | alice = DBSession.query(Person).get('1') 140 | self.assertEqual(query.all(), alice.blogs_from_titles) 141 | 142 | def test_ltree_node_children(self): 143 | lt2 = aliased(LtreeNode) 144 | query = DBSession.query(LtreeNode).select_from(lt2).join( 145 | lt2.children 146 | ).filter( 147 | lt2.id == '1' 148 | ) 149 | root = DBSession.query(LtreeNode).get('1') 150 | self.assertEqual(query.all(), root.children) 151 | 152 | def test_ltree_node_parent(self): 153 | lt2 = aliased(LtreeNode) 154 | query = DBSession.query(LtreeNode).select_from(lt2).join( 155 | lt2.parent 156 | ).filter( 157 | lt2.id == '2' 158 | ) 159 | child = DBSession.query(LtreeNode).get('2') 160 | self.assertEqual(query.one(), child.parent) 161 | 162 | def test_ltree_node_ancestors(self): 163 | lt2 = aliased(LtreeNode) 164 | query = DBSession.query(LtreeNode).select_from(lt2).join( 165 | lt2.ancestors 166 | ).filter( 167 | lt2.node_name == 'r.1.2' 168 | ) 169 | node = DBSession.query(LtreeNode).filter(LtreeNode.node_name == 'r.1.2').one() 170 | # self.assertEqual(query.all(), root.children) 171 | print(query.all()) 172 | -------------------------------------------------------------------------------- /test_project/test_project/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | # package 2 | -------------------------------------------------------------------------------- /test_project/test_project/scripts/initializedb.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import transaction 4 | 5 | from sqlalchemy import engine_from_config 6 | 7 | from pyramid.paster import ( 8 | get_appsettings, 9 | setup_logging, 10 | ) 11 | 12 | from ..models import ( 13 | DBSession, 14 | Base, 15 | Person, 16 | Blog, 17 | Post 18 | ) 19 | 20 | from .. 
import test_data 21 | 22 | 23 | def usage(argv): 24 | cmd = os.path.basename(argv[0]) 25 | print('usage: %s <config_uri>\n' 26 | '(example: "%s development.ini")' % (cmd, cmd)) 27 | sys.exit(1) 28 | 29 | 30 | def main(argv=sys.argv): 31 | if len(argv) != 2: 32 | usage(argv) 33 | config_uri = argv[1] 34 | setup_logging(config_uri) 35 | settings = get_appsettings(config_uri) 36 | engine = engine_from_config(settings, 'sqlalchemy.') 37 | DBSession.configure(bind=engine) 38 | Base.metadata.create_all(engine) 39 | test_data.add_to_db(engine) 40 | -------------------------------------------------------------------------------- /test_project/test_project/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/favicon.ico -------------------------------------------------------------------------------- /test_project/test_project/static/footerbg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/footerbg.png -------------------------------------------------------------------------------- /test_project/test_project/static/headerbg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/headerbg.png -------------------------------------------------------------------------------- /test_project/test_project/static/ie6.css: -------------------------------------------------------------------------------- 1 | * html img, 2 | * html .png{position:relative;behavior:expression((this.runtimeStyle.behavior="none")&&(this.pngSet?this.pngSet=true:(this.nodeName == "IMG" && this.src.toLowerCase().indexOf('.png')>-1?(this.runtimeStyle.backgroundImage = "none", 3 | this.runtimeStyle.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + this.src + "',sizingMethod='image')", 4 | this.src = "static/transparent.gif"):(this.origBg = this.origBg?
this.origBg :this.currentStyle.backgroundImage.toString().replace('url("','').replace('")',''), 5 | this.runtimeStyle.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + this.origBg + "',sizingMethod='crop')", 6 | this.runtimeStyle.backgroundImage = "none")),this.pngSet=true) 7 | );} 8 | #wrap{display:table;height:100%} 9 | -------------------------------------------------------------------------------- /test_project/test_project/static/middlebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/middlebg.png -------------------------------------------------------------------------------- /test_project/test_project/static/pylons.css: -------------------------------------------------------------------------------- 1 | html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, big, cite, code, del, dfn, em, font, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td 2 | { 3 | margin: 0; 4 | padding: 0; 5 | border: 0; 6 | outline: 0; 7 | font-size: 100%; /* 16px */ 8 | vertical-align: baseline; 9 | background: transparent; 10 | } 11 | 12 | body 13 | { 14 | line-height: 1; 15 | } 16 | 17 | ol, ul 18 | { 19 | list-style: none; 20 | } 21 | 22 | blockquote, q 23 | { 24 | quotes: none; 25 | } 26 | 27 | blockquote:before, blockquote:after, q:before, q:after 28 | { 29 | content: ''; 30 | content: none; 31 | } 32 | 33 | :focus 34 | { 35 | outline: 0; 36 | } 37 | 38 | ins 39 | { 40 | text-decoration: none; 41 | } 42 | 43 | del 44 | { 45 | text-decoration: line-through; 46 | } 47 | 48 | table 49 | { 50 | border-collapse: collapse; 51 | border-spacing: 0; 52 | } 53 | 54 | sub 55 | { 56 | vertical-align: sub; 57 | font-size: smaller; 58 | line-height: normal; 59 | } 60 | 61 | sup 62 | { 63 | vertical-align: super; 64 | font-size: smaller; 65 | line-height: normal; 66 | } 67 | 68 | ul, menu, dir 69 | { 70 | display: block; 71 | list-style-type: disc; 72 | margin: 1em 0; 73 | padding-left: 40px; 74 | } 75 | 76 | ol 77 | { 78 | display: block; 79 | list-style-type: decimal-leading-zero; 80 | margin: 1em 0; 81 | padding-left: 40px; 82 | } 83 | 84 | li 85 | { 86 | display: list-item; 87 | } 88 | 89 | ul ul, ul ol, ul dir, ul menu, ul dl, ol ul, ol ol, ol dir, ol menu, ol dl, dir ul, dir ol, dir dir, dir menu, dir dl, menu ul, menu ol, menu dir, menu menu, menu dl, dl ul, dl ol, dl dir, dl menu, dl dl 90 | { 91 | margin-top: 0; 92 | margin-bottom: 0; 93 | } 94 | 95 | ol ul, ul ul, menu ul, dir ul, ol menu, ul menu, menu menu, dir menu, ol dir, ul dir, menu dir, dir dir 96 | { 97 | list-style-type: circle; 98 | } 99 | 100 | ol ol ul, ol ul ul, ol menu ul, ol dir ul, ol ol menu, ol ul menu, ol menu menu, ol dir menu, ol ol dir, ol ul dir, ol menu dir, ol dir dir, ul ol ul, ul ul ul, ul menu ul, ul dir ul, ul ol menu, ul ul menu, ul menu menu, ul dir menu, ul ol dir, ul ul dir, ul menu dir, ul dir dir, menu ol ul, menu ul ul, menu menu ul, menu dir ul, menu ol menu, menu ul menu, menu menu menu, menu dir menu, menu ol dir, menu ul dir, menu menu dir, menu dir dir, dir ol ul, dir ul ul, dir menu ul, dir dir ul, dir ol menu, dir ul menu, dir menu menu, dir dir menu, dir ol dir, dir ul dir, dir menu dir, dir dir dir 101 | { 102 | 
list-style-type: square; 103 | } 104 | 105 | .hidden 106 | { 107 | display: none; 108 | } 109 | 110 | p 111 | { 112 | line-height: 1.5em; 113 | } 114 | 115 | h1 116 | { 117 | font-size: 1.75em; 118 | line-height: 1.7em; 119 | font-family: helvetica, verdana; 120 | } 121 | 122 | h2 123 | { 124 | font-size: 1.5em; 125 | line-height: 1.7em; 126 | font-family: helvetica, verdana; 127 | } 128 | 129 | h3 130 | { 131 | font-size: 1.25em; 132 | line-height: 1.7em; 133 | font-family: helvetica, verdana; 134 | } 135 | 136 | h4 137 | { 138 | font-size: 1em; 139 | line-height: 1.7em; 140 | font-family: helvetica, verdana; 141 | } 142 | 143 | html, body 144 | { 145 | width: 100%; 146 | height: 100%; 147 | } 148 | 149 | body 150 | { 151 | margin: 0; 152 | padding: 0; 153 | background-color: #fff; 154 | position: relative; 155 | font: 16px/24px NobileRegular, "Lucida Grande", Lucida, Verdana, sans-serif; 156 | } 157 | 158 | a 159 | { 160 | color: #1b61d6; 161 | text-decoration: none; 162 | } 163 | 164 | a:hover 165 | { 166 | color: #e88f00; 167 | text-decoration: underline; 168 | } 169 | 170 | body h1, body h2, body h3, body h4, body h5, body h6 171 | { 172 | font-family: NeutonRegular, "Lucida Grande", Lucida, Verdana, sans-serif; 173 | font-weight: 400; 174 | color: #373839; 175 | font-style: normal; 176 | } 177 | 178 | #wrap 179 | { 180 | min-height: 100%; 181 | } 182 | 183 | #header, #footer 184 | { 185 | width: 100%; 186 | color: #fff; 187 | height: 40px; 188 | position: absolute; 189 | text-align: center; 190 | line-height: 40px; 191 | overflow: hidden; 192 | font-size: 12px; 193 | vertical-align: middle; 194 | } 195 | 196 | #header 197 | { 198 | background: #000; 199 | top: 0; 200 | font-size: 14px; 201 | } 202 | 203 | #footer 204 | { 205 | bottom: 0; 206 | background: #000 url(footerbg.png) repeat-x 0 top; 207 | position: relative; 208 | margin-top: -40px; 209 | clear: both; 210 | } 211 | 212 | .header, .footer 213 | { 214 | width: 750px; 215 | margin-right: auto; 216 | margin-left: auto; 217 | } 218 | 219 | .wrapper 220 | { 221 | width: 100%; 222 | } 223 | 224 | #top, #top-small, #bottom 225 | { 226 | width: 100%; 227 | } 228 | 229 | #top 230 | { 231 | color: #000; 232 | height: 230px; 233 | background: #fff url(headerbg.png) repeat-x 0 top; 234 | position: relative; 235 | } 236 | 237 | #top-small 238 | { 239 | color: #000; 240 | height: 60px; 241 | background: #fff url(headerbg.png) repeat-x 0 top; 242 | position: relative; 243 | } 244 | 245 | #bottom 246 | { 247 | color: #222; 248 | background-color: #fff; 249 | } 250 | 251 | .top, .top-small, .middle, .bottom 252 | { 253 | width: 750px; 254 | margin-right: auto; 255 | margin-left: auto; 256 | } 257 | 258 | .top 259 | { 260 | padding-top: 40px; 261 | } 262 | 263 | .top-small 264 | { 265 | padding-top: 10px; 266 | } 267 | 268 | #middle 269 | { 270 | width: 100%; 271 | height: 100px; 272 | background: url(middlebg.png) repeat-x; 273 | border-top: 2px solid #fff; 274 | border-bottom: 2px solid #b2b2b2; 275 | } 276 | 277 | .app-welcome 278 | { 279 | margin-top: 25px; 280 | } 281 | 282 | .app-name 283 | { 284 | color: #000; 285 | font-weight: 700; 286 | } 287 | 288 | .bottom 289 | { 290 | padding-top: 50px; 291 | } 292 | 293 | #left 294 | { 295 | width: 350px; 296 | float: left; 297 | padding-right: 25px; 298 | } 299 | 300 | #right 301 | { 302 | width: 350px; 303 | float: right; 304 | padding-left: 25px; 305 | } 306 | 307 | .align-left 308 | { 309 | text-align: left; 310 | } 311 | 312 | .align-right 313 | { 314 | text-align: right; 315 | } 316 | 
317 | .align-center 318 | { 319 | text-align: center; 320 | } 321 | 322 | ul.links 323 | { 324 | margin: 0; 325 | padding: 0; 326 | } 327 | 328 | ul.links li 329 | { 330 | list-style-type: none; 331 | font-size: 14px; 332 | } 333 | 334 | form 335 | { 336 | border-style: none; 337 | } 338 | 339 | fieldset 340 | { 341 | border-style: none; 342 | } 343 | 344 | input 345 | { 346 | color: #222; 347 | border: 1px solid #ccc; 348 | font-family: sans-serif; 349 | font-size: 12px; 350 | line-height: 16px; 351 | } 352 | 353 | input[type=text], input[type=password] 354 | { 355 | width: 205px; 356 | } 357 | 358 | input[type=submit] 359 | { 360 | background-color: #ddd; 361 | font-weight: 700; 362 | } 363 | 364 | /*Opera Fix*/ 365 | body:before 366 | { 367 | content: ""; 368 | height: 100%; 369 | float: left; 370 | width: 0; 371 | margin-top: -32767px; 372 | } 373 | -------------------------------------------------------------------------------- /test_project/test_project/static/pyramid-small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/pyramid-small.png -------------------------------------------------------------------------------- /test_project/test_project/static/pyramid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/pyramid.png -------------------------------------------------------------------------------- /test_project/test_project/static/transparent.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/colinhiggs/pyramid-jsonapi/ec8623ec99911e2028b76a564d6f25af8862ec49/test_project/test_project/static/transparent.gif -------------------------------------------------------------------------------- /test_project/test_project/templates/mytemplate.pt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | The Pyramid Web Application Development Framework 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 15 | 16 | 17 |
[remainder of mytemplate.pt (lines 18-77): the HTML markup of this stock Pyramid starter-project page template was stripped during conversion; the surviving text is the "pyramid" logo, the blurb "Welcome to ${project}, an application generated by the Pyramid web application development framework.", and a "Search documentation" form.]
-------------------------------------------------------------------------------- /test_project/test_project/test-openapi.json: -------------------------------------------------------------------------------- 1 | { 2 | "openapi": "999" 3 | } 4 | -------------------------------------------------------------------------------- /test_project/test_project/test_data.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy 2 | from sqlalchemy import ( 3 | delete, 4 | func, 5 | ) 6 | import transaction 7 | from test_project import models 8 | from test_project.models import ( 9 | DBSession, 10 | ) 11 | import datetime 12 | import inspect 13 | import sys 14 | import json 15 | from pathlib import Path 16 | import re 17 | 18 | import ltree_models 19 | 20 | def add_to_db(engine): 21 | '''Add some basic test data.''' 22 | meta = sqlalchemy.MetaData() 23 | meta.reflect(engine) 24 | # Some initial data in a handy form. 25 | module_file = Path(inspect.getfile(sys.modules[__name__])) 26 | with open(str(module_file.parent / 'test_data.json')) as f: 27 | data = json.load(f) 28 | with transaction.manager: 29 | for dataset in data['models']: 30 | model = getattr(models, dataset[0]) 31 | opts = None 32 | if len(dataset) > 2: 33 | opts = dataset[2] 34 | for item in dataset[1]: 35 | set_item(model, item_transform(item), opts) 36 | # Set the current value of the associated sequence to the maximum 37 | # id we added. 38 | try: 39 | id_col_name = model.__pyramid_jsonapi__['id_col_name'] 40 | except AttributeError: 41 | id_col_name = sqlalchemy.inspect(model).primary_key[0].name 42 | seq_text = meta.tables[model.__tablename__].columns[id_col_name].server_default.arg.text 43 | seq_name = re.match(r"^nextval\('(\w+)'::", seq_text).group(1) 44 | max_id = DBSession.query(func.max(getattr(model, id_col_name))).one()[0] 45 | DBSession.execute("select setval('{}', {})".format(seq_name, max_id)) 46 | 47 | for assoc_data in data.get('associations',[]): 48 | table = getattr(models, assoc_data[0]) 49 | for assoc in assoc_data[1]: 50 | rows = DBSession.query(table).filter_by(**assoc).all() 51 | if not rows: 52 | DBSession.execute(table.insert(), assoc) 53 | 54 | DBSession.execute(delete(models.LtreeNode.__table__)) 55 | 56 | lbuilder = ltree_models.LtreeBuilder(DBSession.bind, models.LtreeNode) 57 | lbuilder.populate(2, 5) 58 | 59 | def item_transform(item): 60 | '''Transform item prior to saving to database. 61 | 62 | * Attributes named __json__<name> will be renamed to <name> with 63 | values parsed by the json parser first. 64 | ''' 65 | new_item = {} 66 | for att, val in item.items(): 67 | if att.startswith('__json__'): 68 | att = att.replace('__json__','') 69 | val = json.loads(val) 70 | new_item[att] = val 71 | return new_item 72 | 73 | def set_item(model, data, opts): 74 | '''Make sure item exists in the db with attributes as specified in data.
75 | ''' 76 | # Assume only one primary key 77 | if opts is None: 78 | opts = dict() 79 | 80 | keycols = sqlalchemy.inspect(model).primary_key 81 | if len(keycols) > 1: 82 | raise Exception( 83 | 'Model {} has more than one primary key.'.format( 84 | model.__name__ 85 | ) 86 | ) 87 | keycol = keycols[0] 88 | item = DBSession.query(model).get(data[keycol.name]) 89 | if item: 90 | for key, val in data.items(): 91 | setattr(item, key, val) 92 | else: 93 | item = model(**data) 94 | DBSession.add(item) 95 | seq_name = opts.get('id_seq') 96 | if seq_name is not None: 97 | # The key column gets its default value from a sequence: make sure 98 | # that the sequence is updated to at least the value of the id we're 99 | # adding now. 100 | if seq_name == '*': 101 | # '*' indicates use the default sequence name. 102 | seq_name = '{}_{}_seq'.format( 103 | sqlalchemy.inspect(item).mapper.class_.__tablename__, 104 | keycol.name 105 | ) 106 | 107 | item_id = getattr(item, keycol.name) 108 | 109 | # Increment the sequence since: 110 | # 1) We'll probably need to anyway as we add the item. 111 | # 2) It's the only way to find out the value if the sequence 112 | # hasn't been used yet in this session. 113 | seqval = DBSession.execute( 114 | "select nextval('{}')".format(seq_name) 115 | ).scalar() 116 | 117 | if seqval > int(item_id): 118 | # If seqval is higher than item_id then we shouldn't have 119 | # incremented it: put it back by one. 120 | # 121 | # WARNING: this is not safe! We didn't do it atomically and 122 | # there's a danger someone in another session/transaction 123 | # changed the sequence in between. 124 | # 125 | # We should be fine here because we're only populating the DB 126 | # with test data - no-one else should be using it. 127 | DBSession.execute( 128 | "select setval('{}', {})".format( 129 | seq_name, seqval - 1 130 | ) 131 | ).scalar() 132 | seqval = seqval - 1 133 | -------------------------------------------------------------------------------- /test_project/test_project/views.py: -------------------------------------------------------------------------------- 1 | from pyramid.response import Response 2 | from pyramid.view import view_config 3 | 4 | from sqlalchemy.exc import DBAPIError 5 | 6 | from .models import ( 7 | DBSession, 8 | ) 9 | 10 | @view_config(route_name='echo', match_param='type=params', renderer='json') 11 | def echo_params(request): 12 | return {k: request.params.getall(k) for k in request.params.keys()} 13 | 14 | @view_config(route_name='echo', match_param='type=request', renderer='json') 15 | def echo_request(request): 16 | return { 17 | 'method': request.method, 18 | 'url': request.url, 19 | 'headers': dict(request.headers), 20 | 'body': request.body.decode('utf8'), 21 | } 22 | -------------------------------------------------------------------------------- /test_project/testing.ini: -------------------------------------------------------------------------------- 1 | ### 2 | # app configuration 3 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html 4 | ### 5 | 6 | [app:main] 7 | use = egg:test_project 8 | 9 | pyramid_jsonapi.allow_client_ids = true 10 | pyramid_jsonapi_tests.models_iterable = module 11 | pyramid_jsonapi.route_pattern_api_prefix = 12 | pyramid_jsonapi.debug_traceback = true 13 | 14 | pyramid.reload_templates = true 15 | pyramid.debug_authorization = false 16 | pyramid.debug_notfound = false 17 | pyramid.debug_routematch = false 18 | pyramid.default_locale_name = en 19 | pyramid.includes = 20 |
pyramid_debugtoolbar 21 | pyramid_tm 22 | 23 | # The test suite should create a database in /tmp and serve at this url 24 | sqlalchemy.url = postgresql://postgres@localhost:7654/test 25 | 26 | # By default, the toolbar only appears for clients from IP addresses 27 | # '127.0.0.1' and '::1'. 28 | # debugtoolbar.hosts = 127.0.0.1 ::1 29 | 30 | ### 31 | # wsgi server configuration 32 | ### 33 | 34 | [server:main] 35 | use = egg:waitress#main 36 | host = 0.0.0.0 37 | port = 6543 38 | 39 | ### 40 | # logging configuration 41 | # http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html 42 | ### 43 | 44 | [loggers] 45 | keys = root, test_project, sqlalchemy 46 | 47 | [handlers] 48 | keys = console 49 | 50 | [formatters] 51 | keys = generic 52 | 53 | [logger_root] 54 | level = INFO 55 | handlers = console 56 | 57 | [logger_test_project] 58 | level = DEBUG 59 | handlers = 60 | qualname = test_project 61 | 62 | [logger_sqlalchemy] 63 | level = INFO 64 | handlers = 65 | qualname = sqlalchemy.engine 66 | # "level = INFO" logs SQL queries. 67 | # "level = DEBUG" logs SQL queries and results. 68 | # "level = WARN" logs neither. (Recommended for production systems.) 69 | 70 | [handler_console] 71 | class = StreamHandler 72 | args = (sys.stderr,) 73 | level = NOTSET 74 | formatter = generic 75 | 76 | [formatter_generic] 77 | format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s 78 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist=py3, docs, report 3 | skip_install=true 4 | skipsdist=true 5 | 6 | [testenv:py3] 7 | # Deps needed for code testing (actual deps are in setup.py) 8 | deps= 9 | coverage 10 | coveralls 11 | pycodestyle 12 | pylint 13 | commands= 14 | ## Commands fail 'fast' so later commands won't run if any earlier one fails 15 | # Install 'local' pyramid_jsonapi 16 | pip install -e . 17 | # Install deps for test_project 18 | pip install -e test_project 19 | pycodestyle --ignore=E402,E501,W503,W504,E731 pyramid_jsonapi 20 | pylint --errors-only --rcfile=.pylintrc pyramid_jsonapi 21 | # Call unittest from coverage (add --buffer to 'discover' to hide output from tests that pass) 22 | coverage run --source=pyramid_jsonapi -m unittest --verbose -f {posargs:pyramid_jsonapi.unit_tests test_project.tests} 23 | # Generate coverage report 24 | #coverage report -m 25 | # Try to push coverage data to coveralls (ignore exit code as will fail if not on travis) 26 | - coveralls 27 | 28 | [testenv:report] 29 | deps= 30 | coverage 31 | commands= 32 | coverage report -m 33 | 34 | [testenv:docs] 35 | # Deps needed for building the docs 36 | deps= 37 | sphinx 38 | sphinx-rtd-theme 39 | travis-sphinx 40 | # Pass in TRAVIS tokens and GH_TOKEN for travis-sphinx 41 | passenv=TRAVIS TRAVIS_* GH_TOKEN 42 | commands= 43 | pip install -e . 44 | # Build the sphinx docs (will push to gh-pages if tox is run by travis) 45 | docs: /bin/bash docs/sphinx.sh 46 | whitelist_externals= 47 | /bin/bash 48 | --------------------------------------------------------------------------------