├── .github └── workflows │ ├── publish.yml │ └── test.yml ├── .gitignore ├── LICENSE ├── README.md ├── datasette_atom └── __init__.py ├── setup.py └── tests ├── __init__.py └── test_atom.py /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | test: 12 | runs-on: ubuntu-latest 13 | strategy: 14 | matrix: 15 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Set up Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | cache: pip 23 | cache-dependency-path: setup.py 24 | - name: Install dependencies 25 | run: | 26 | pip install '.[test]' 27 | - name: Run tests 28 | run: | 29 | pytest 30 | deploy: 31 | runs-on: ubuntu-latest 32 | needs: [test] 33 | steps: 34 | - uses: actions/checkout@v3 35 | - name: Set up Python 36 | uses: actions/setup-python@v4 37 | with: 38 | python-version: "3.11" 39 | cache: pip 40 | cache-dependency-path: setup.py 41 | - name: Install dependencies 42 | run: | 43 | pip install setuptools wheel twine build 44 | - name: Publish 45 | env: 46 | TWINE_USERNAME: __token__ 47 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} 48 | run: | 49 | python -m build 50 | twine upload dist/* 51 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: [push, pull_request] 4 | 5 | permissions: 6 | contents: read 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 14 | steps: 15 | - uses: actions/checkout@v3 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | cache: pip 21 | cache-dependency-path: setup.py 22 | - name: Install dependencies 23 | run: | 24 | pip install '.[test]' 25 | - name: Run tests 26 | run: | 27 | pytest 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | venv 6 | .eggs 7 | .pytest_cache 8 | *.egg-info -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # datasette-atom 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/datasette-atom.svg)](https://pypi.org/project/datasette-atom/) 4 | [![Changelog](https://img.shields.io/github/v/release/simonw/datasette-atom?include_prereleases&label=changelog)](https://github.com/simonw/datasette-atom/releases) 5 | [![Tests](https://github.com/simonw/datasette-atom/workflows/Test/badge.svg)](https://github.com/simonw/datasette-atom/actions?query=workflow%3ATest) 6 | [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette-atom/blob/main/LICENSE) 7 | 8 | Datasette plugin that adds support for generating [Atom feeds](https://validator.w3.org/feed/docs/atom.html) with the results of a SQL query. 9 | 10 | ## Installation 11 | 12 | Install this plugin in the same environment as Datasette to enable the `.atom` output extension. 13 | 14 | $ pip install datasette-atom 15 | 16 | ## Usage 17 | 18 | To create an Atom feed you need to define a custom SQL query that returns a required set of columns: 19 | 20 | * `atom_id` - a unique ID for each row. [This article](https://web.archive.org/web/20080211143232/http://diveintomark.org/archives/2004/05/28/howto-atom-id) has suggestions about ways to create these IDs. 21 | * `atom_title` - a title for that row. 22 | * `atom_updated` - an [RFC 3339](http://www.faqs.org/rfcs/rfc3339.html) timestamp representing the last time the entry was modified in a significant way. This can usually be the time that the row was created. 23 | 24 | The following columns are optional: 25 | 26 | * `atom_content` - content that should be shown in the feed. This will be treated as a regular string, so any embedded HTML tags will be escaped when they are displayed. 27 | * `atom_content_html` - content that should be shown in the feed. This will be treated as an HTML string, and will be sanitized using [Bleach](https://github.com/mozilla/bleach) to ensure it does not have any malicious code in it before being returned as part of a `<content>` Atom element. If both are provided, this will be used in place of `atom_content`. 28 | * `atom_link` - a URL that should be used as the link that the feed entry points to. 29 | * `atom_author_name` - the name of the author of the entry. If you provide this you can also provide `atom_author_uri` and `atom_author_email` with a URL and e-mail address for that author. 30 | 31 | A query that returns these columns can then be returned as an Atom feed by adding the `.atom` extension.
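For instance, a query as small as the following (loosely adapted from this plugin's test suite; the literal values are purely illustrative) already returns the three required columns, so appending `.atom` to its URL produces a valid feed:

```sql
select
  'atom-id-1' as atom_id,
  'An example entry' as atom_title,
  '2019-10-23T21:32:12-07:00' as atom_updated
```

In practice you would select these values from a real table, as in the fuller example below.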
32 | 33 | ## Example 34 | 35 | Here is an example SQL query which generates an Atom feed for new entries on [www.niche-museums.com](https://www.niche-museums.com/): 36 | 37 | ```sql 38 | select 39 | 'tag:niche-museums.com,' || substr(created, 0, 11) || ':' || id as atom_id, 40 | name as atom_title, 41 | created as atom_updated, 42 | 'https://www.niche-museums.com/browse/museums/' || id as atom_link, 43 | coalesce( 44 | '', 45 | '' 46 | ) || '

<p>' || description || '</p>
' as atom_content_html 47 | from 48 | museums 49 | order by 50 | created desc 51 | limit 52 | 15 53 | ``` 54 | 55 | You can try this query by [pasting it in here](https://www.niche-museums.com/browse) - then click the `.atom` link to see it as an Atom feed. 56 | 57 | ## Using a canned query 58 | 59 | Datasette's [canned query mechanism](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries) is a useful way to configure feeds. If a canned query definition has a `title` that will be used as the title of the Atom feed. 60 | 61 | Here's an example, defined using a `metadata.yaml` file: 62 | 63 | ```yaml 64 | databases: 65 | browse: 66 | queries: 67 | feed: 68 | title: Niche Museums 69 | sql: |- 70 | select 71 | 'tag:niche-museums.com,' || substr(created, 0, 11) || ':' || id as atom_id, 72 | name as atom_title, 73 | created as atom_updated, 74 | 'https://www.niche-museums.com/browse/museums/' || id as atom_link, 75 | coalesce( 76 | '', 77 | '' 78 | ) || '

<p>' || description || '</p>
' as atom_content_html 79 | from 80 | museums 81 | order by 82 | created desc 83 | limit 84 | 15 85 | ``` 86 | ## Disabling HTML filtering 87 | 88 | The HTML allow-list used by Bleach for the `atom_content_html` column can be found in the `clean(html)` function at the bottom of [datasette_atom/__init__.py](https://github.com/simonw/datasette-atom/blob/main/datasette_atom/__init__.py). 89 | 90 | You can disable Bleach entirely for Atom feeds generated using a canned query. You should only do this if you are certain that no user-provided HTML could be included in that value. 91 | 92 | Here's how to do that in `metadata.json`: 93 | 94 | ```json 95 | { 96 | "plugins": { 97 | "datasette-atom": { 98 | "allow_unsafe_html_in_canned_queries": true 99 | } 100 | } 101 | } 102 | ``` 103 | Setting this to `true` will disable Bleach filtering for all canned queries across all databases. 104 | 105 | You can disable Bleach filtering just for a specific list of canned queries like so: 106 | 107 | ```json 108 | { 109 | "plugins": { 110 | "datasette-atom": { 111 | "allow_unsafe_html_in_canned_queries": { 112 | "museums": ["latest", "moderation"] 113 | } 114 | } 115 | } 116 | } 117 | ``` 118 | This will disable Bleach just for the canned queries called `latest` and `moderation` in the `museums.db` database. 119 | -------------------------------------------------------------------------------- /datasette_atom/__init__.py: -------------------------------------------------------------------------------- 1 | import bleach 2 | from datasette import hookimpl, __version__ 3 | from datasette.utils.asgi import Response 4 | from feedgen.feed import FeedGenerator 5 | import hashlib 6 | import html 7 | 8 | REQUIRED_COLUMNS = {"atom_id", "atom_updated", "atom_title"} 9 | 10 | 11 | @hookimpl 12 | def register_output_renderer(): 13 | return {"extension": "atom", "render": render_atom, "can_render": can_render_atom} 14 | 15 | 16 | def render_atom( 17 | datasette, request, sql, columns, rows, database, table, query_name, view_name, data 18 | ): 19 | from datasette.views.base import DatasetteError 20 | 21 | if not REQUIRED_COLUMNS.issubset(columns): 22 | raise DatasetteError( 23 | "SQL query must return columns {}".format(", ".join(REQUIRED_COLUMNS)), 24 | status=400, 25 | ) 26 | fg = FeedGenerator() 27 | fg.generator( 28 | generator="Datasette", 29 | version=__version__, 30 | uri="https://github.com/simonw/datasette", 31 | ) 32 | fg.id(request.url) 33 | fg.link(href=request.url, rel="self") 34 | fg.updated(max(row["atom_updated"] for row in rows)) 35 | title = request.args.get("_feed_title", sql) 36 | if table: 37 | title += "/" + table 38 | if data.get("human_description_en"): 39 | title += ": " + data["human_description_en"] 40 | # If this is a canned query the configured title for that over-rides all others 41 | if query_name: 42 | try: 43 | title = datasette.metadata(database=database)["queries"][query_name][ 44 | "title" 45 | ] 46 | except (KeyError, TypeError): 47 | pass 48 | fg.title(title) 49 | 50 | clean_function = clean 51 | if query_name: 52 | # Check allow_unsafe_html_in_canned_queries 53 | plugin_config = datasette.plugin_config("datasette-atom") 54 | if plugin_config: 55 | allow_unsafe_html_in_canned_queries = plugin_config.get( 56 | "allow_unsafe_html_in_canned_queries" 57 | ) 58 | if allow_unsafe_html_in_canned_queries is True: 59 | clean_function = lambda s: s 60 | elif isinstance(allow_unsafe_html_in_canned_queries, dict): 61 | allowlist = allow_unsafe_html_in_canned_queries.get(database) or [] 62 | if 
query_name in allowlist: 63 | clean_function = lambda s: s 64 | 65 | # And the rows 66 | for row in reversed(rows): 67 | entry = fg.add_entry() 68 | entry.id(str(row["atom_id"])) 69 | if "atom_content_html" in columns: 70 | entry.content(clean_function(row["atom_content_html"]), type="html") 71 | elif "atom_content" in columns: 72 | entry.content(row["atom_content"], type="text") 73 | entry.updated(row["atom_updated"]) 74 | entry.title(str(row["atom_title"])) 75 | # atom_link is optional 76 | if "atom_link" in columns: 77 | entry.link(href=row["atom_link"]) 78 | if "atom_author_name" in columns and row["atom_author_name"]: 79 | author = { 80 | "name": row["atom_author_name"], 81 | } 82 | for key in ("uri", "email"): 83 | colname = "atom_author_{}".format(key) 84 | if colname in columns and row[colname]: 85 | author[key] = row[colname] 86 | entry.author(author) 87 | 88 | return Response( 89 | fg.atom_str(pretty=True), 90 | content_type="application/xml; charset=utf-8", 91 | status=200, 92 | ) 93 | 94 | 95 | def can_render_atom(columns): 96 | return REQUIRED_COLUMNS.issubset(columns) 97 | 98 | 99 | def clean(html): 100 | cleaned = bleach.clean( 101 | html, 102 | tags=[ 103 | "a", 104 | "abbr", 105 | "acronym", 106 | "b", 107 | "blockquote", 108 | "br", 109 | "code", 110 | "em", 111 | "i", 112 | "li", 113 | "ol", 114 | "strong", 115 | "ul", 116 | "pre", 117 | "p", 118 | "h1", 119 | "h2", 120 | "h3", 121 | "h4", 122 | "h5", 123 | "h6", 124 | "img", 125 | ], 126 | attributes={"a": ["href", "title"], "img": ["alt", "src"]}, 127 | ) 128 | return cleaned 129 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | import os 3 | 4 | VERSION = "0.9" 5 | 6 | 7 | def get_long_description(): 8 | with open( 9 | os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md"), 10 | encoding="utf8", 11 | ) as fp: 12 | return fp.read() 13 | 14 | 15 | setup( 16 | name="datasette-atom", 17 | description="Datasette plugin that adds a .atom output format", 18 | long_description=get_long_description(), 19 | long_description_content_type="text/markdown", 20 | author="Simon Willison", 21 | url="https://github.com/simonw/datasette-atom", 22 | project_urls={ 23 | "Issues": "https://github.com/simonw/datasette-atom/issues", 24 | "CI": "https://github.com/simonw/datasette-atom/actions", 25 | "Changelog": "https://github.com/simonw/datasette-atom/releases", 26 | }, 27 | license="Apache License, Version 2.0", 28 | version=VERSION, 29 | packages=["datasette_atom"], 30 | entry_points={"datasette": ["atom = datasette_atom"]}, 31 | install_requires=["datasette>=0.49", "feedgen", "bleach"], 32 | extras_require={"test": ["pytest", "pytest-asyncio", "httpx"]}, 33 | tests_require=["datasette-atom[test]"], 34 | ) 35 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simonw/datasette-atom/3fc967d6c420b9128db556ccf92e43ac89b02b99/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_atom.py: -------------------------------------------------------------------------------- 1 | import datasette 2 | from datasette.app import Datasette 3 | import urllib.parse 4 | import pytest 5 | 6 | EXPECTED_ATOM = """ 7 | 8 | 9 | 
http://localhost/_memory.atom?sql=%0A++++select%0A++++++++1+as+atom_id%2C%0A++++++++123+as+atom_title%2C%0A++++++++%272019-10-23T21%3A32%3A12-07%3A00%27+as+atom_updated%2C%0A++++++++%27blah+%3Cb%3EBold%3C%2Fb%3E%27+as+atom_content%2C%0A++++++++%27Author%27+as+atom_author_name%2C%0A++++++++%27https%3A%2F%2Fwww.example.com%2F%27+as+atom_author_uri%0A++++union+select%0A++++++++%27atom-id-2%27+as+atom_id%2C%0A++++++++%27title+2%27+as+atom_title%2C%0A++++++++%272019-09-23T21%3A32%3A12-07%3A00%27+as+atom_updated%2C%0A++++++++%27blah%27+as+atom_content%2C%0A++++++++null+as+atom_author_name%2C%0A++++++++null+as+atom_author_uri%3B%0A++++ 10 | 11 | select 12 | 1 as atom_id, 13 | 123 as atom_title, 14 | '2019-10-23T21:32:12-07:00' as atom_updated, 15 | 'blah <b>Bold</b>' as atom_content, 16 | 'Author' as atom_author_name, 17 | 'https://www.example.com/' as atom_author_uri 18 | union select 19 | 'atom-id-2' as atom_id, 20 | 'title 2' as atom_title, 21 | '2019-09-23T21:32:12-07:00' as atom_updated, 22 | 'blah' as atom_content, 23 | null as atom_author_name, 24 | null as atom_author_uri; 25 | 26 | 2019-10-23T21:32:12-07:00 27 | 28 | Datasette 29 | 30 | 1 31 | 123 32 | 2019-10-23T21:32:12-07:00 33 | 34 | Author 35 | https://www.example.com/ 36 | 37 | blah <b>Bold</b> 38 | 39 | 40 | atom-id-2 41 | title 2 42 | 2019-09-23T21:32:12-07:00 43 | blah 44 | 45 | 46 | """.strip() 47 | 48 | EXPECTED_ATOM_WITH_LINK = """ 49 | 50 | 51 | http://localhost/_memory.atom?sql=%0A++++select%0A++++++++%27atom-id%27+as+atom_id%2C%0A++++++++%27title%27+as+atom_title%2C%0A++++++++%272019-10-23T21%3A32%3A12-07%3A00%27+as+atom_updated%2C%0A++++++++%27https%3A%2F%2Fwww.niche-museums.com%2F%27+as+atom_link%2C%0A++++++++%27blah%27+as+atom_content%3B%0A++++ 52 | 53 | select 54 | 'atom-id' as atom_id, 55 | 'title' as atom_title, 56 | '2019-10-23T21:32:12-07:00' as atom_updated, 57 | 'https://www.niche-museums.com/' as atom_link, 58 | 'blah' as atom_content; 59 | 60 | 2019-10-23T21:32:12-07:00 61 | 62 | Datasette 63 | 64 | atom-id 65 | title 66 | 2019-10-23T21:32:12-07:00 67 | blah 68 | 69 | 70 | 71 | """.strip() 72 | 73 | EXPECTED_ATOM_WITH_HTML = """ 74 | 75 | 76 | http://localhost/_memory.atom?sql=%0A++++select%0A++++++++%27atom-id%27+as+atom_id%2C%0A++++++++%27title%27+as+atom_title%2C%0A++++++++%272019-10-23T21%3A32%3A12-07%3A00%27+as+atom_updated%2C%0A++++++++%27https%3A%2F%2Fwww.niche-museums.com%2F%27+as+atom_link%2C%0A++++++++%27%3Ch2%3Eblah%3C%2Fh2%3E%3Cbr%3E%3Cscript%3Ealert%28%22bad%22%29%3C%2Fscript%3E%27+as+atom_content_html%3B%0A++++ 77 | 78 | select 79 | 'atom-id' as atom_id, 80 | 'title' as atom_title, 81 | '2019-10-23T21:32:12-07:00' as atom_updated, 82 | 'https://www.niche-museums.com/' as atom_link, 83 | '<h2>blah</h2><br><script>alert("bad")</script>' as atom_content_html; 84 | 85 | 2019-10-23T21:32:12-07:00 86 | 87 | Datasette 88 | 89 | atom-id 90 | title 91 | 2019-10-23T21:32:12-07:00 92 | <h2>blah</h2><br>&lt;script&gt;alert("bad")&lt;/script&gt; 93 | 94 | 95 | 96 | """.strip() 97 | 98 | 99 | @pytest.mark.asyncio 100 | async def test_incorrect_sql_returns_400(): 101 | ds = Datasette(immutables=[], memory=True) 102 | response = await ds.client.get("/_memory.atom?sql=select+sqlite_version()") 103 | assert 400 == response.status_code 104 | assert b"SQL query must return columns" in response.content 105 | 106 | 107 | @pytest.mark.asyncio 108 | async def test_atom_for_valid_query(): 109 | sql = """ 110 | select 111 | 1 as atom_id, 112 | 123 as atom_title, 113 | '2019-10-23T21:32:12-07:00' as atom_updated, 114 | 
'blah <b>Bold</b>' as atom_content, 115 | 'Author' as atom_author_name, 116 | 'https://www.example.com/' as atom_author_uri 117 | union select 118 | 'atom-id-2' as atom_id, 119 | 'title 2' as atom_title, 120 | '2019-09-23T21:32:12-07:00' as atom_updated, 121 | 'blah' as atom_content, 122 | null as atom_author_name, 123 | null as atom_author_uri; 124 | """ 125 | ds = Datasette(memory=True) 126 | response = await ds.client.get("/_memory.atom", params={"sql": sql}) 127 | assert 200 == response.status_code 128 | assert "application/xml; charset=utf-8" == response.headers["content-type"] 129 | assert ( 130 | EXPECTED_ATOM.format(version=datasette.__version__) 131 | == response.content.decode("utf-8").strip() 132 | ) 133 | 134 | 135 | @pytest.mark.asyncio 136 | async def test_atom_with_optional_link(): 137 | sql = """ 138 | select 139 | 'atom-id' as atom_id, 140 | 'title' as atom_title, 141 | '2019-10-23T21:32:12-07:00' as atom_updated, 142 | 'https://www.niche-museums.com/' as atom_link, 143 | 'blah' as atom_content; 144 | """ 145 | ds = Datasette(memory=True) 146 | response = await ds.client.get("/_memory.atom", params={"sql": sql}) 147 | assert 200 == response.status_code 148 | assert "application/xml; charset=utf-8" == response.headers["content-type"] 149 | assert ( 150 | EXPECTED_ATOM_WITH_LINK.format(version=datasette.__version__) 151 | == response.content.decode("utf-8").strip() 152 | ) 153 | 154 | 155 | @pytest.mark.asyncio 156 | async def test_atom_with_bad_html(): 157 | sql = """ 158 | select 159 | 'atom-id' as atom_id, 160 | 'title' as atom_title, 161 | '2019-10-23T21:32:12-07:00' as atom_updated, 162 | 'https://www.niche-museums.com/' as atom_link, 163 | '

<h2>blah</h2><br><script>alert("bad")</script>
' as atom_content_html; 164 | """ 165 | ds = Datasette(memory=True) 166 | response = await ds.client.get("/_memory.atom", params={"sql": sql}) 167 | assert 200 == response.status_code 168 | assert "application/xml; charset=utf-8" == response.headers["content-type"] 169 | assert ( 170 | EXPECTED_ATOM_WITH_HTML.format(version=datasette.__version__) 171 | == response.content.decode("utf-8").strip() 172 | ) 173 | 174 | 175 | @pytest.mark.asyncio 176 | async def test_atom_link_only_shown_for_correct_queries(): 177 | sql = """ 178 | select 179 | 'atom-id' as atom_id, 180 | 'title' as atom_title, 181 | '2019-10-23T21:32:12-07:00' as atom_updated, 182 | 'https://www.niche-museums.com/' as atom_link, 183 | '

<h2>blah</h2><br><script>alert("bad")</script>
' as atom_content_html; 184 | """ 185 | ds = Datasette(memory=True) 186 | response = await ds.client.get("/_memory", params={"sql": sql}) 187 | assert 200 == response.status_code 188 | assert "text/html; charset=utf-8" == response.headers["content-type"] 189 | assert 'My atom feed" in xml 221 | 222 | 223 | @pytest.mark.asyncio 224 | @pytest.mark.parametrize( 225 | "config,should_allow", 226 | [ 227 | (True, True), 228 | (False, False), 229 | ({"_memory": ["latest"]}, True), 230 | ({"_memory": ["notlatest"]}, False), 231 | ], 232 | ) 233 | async def test_allow_unsafe_html_in_canned_queries(config, should_allow): 234 | sql = """ 235 | select 236 | 'atom-id' as atom_id, 237 | 'title' as atom_title, 238 | '2019-10-23T21:32:12-07:00' as atom_updated, 239 | 'https://www.niche-museums.com/' as atom_link, 240 | '' as atom_content_html; 241 | """ 242 | metadata = { 243 | "databases": { 244 | "_memory": {"queries": {"latest": {"sql": sql}}}, 245 | }, 246 | "plugins": {"datasette-atom": {"allow_unsafe_html_in_canned_queries": config}}, 247 | } 248 | ds = Datasette( 249 | memory=True, 250 | metadata=metadata, 251 | ) 252 | response = await ds.client.get("/_memory/latest.atom") 253 | assert 200 == response.status_code 254 | assert "application/xml; charset=utf-8" == response.headers["content-type"] 255 | if should_allow: 256 | assert ( 257 | '<iframe>An iframe!</iframe>' 258 | in response.text 259 | ) 260 | else: 261 | assert ( 262 | '&lt;iframe&gt;An iframe!&lt;/iframe&gt;' 263 | in response.text 264 | ) 265 | --------------------------------------------------------------------------------