├── .gitattributes
├── .github
│   └── workflows
│       ├── publish.yml
│       └── test.yml
├── .gitignore
├── LICENSE
├── Pipfile
├── Pipfile.lock
├── README.md
├── datasette_query_files
│   └── __init__.py
├── pytest.ini
├── setup.py
└── tests
    ├── legislators.db
    ├── metadata.yml
    ├── queries
    │   ├── legislators
    │   │   ├── presidents.json
    │   │   ├── presidents.sql
    │   │   ├── women_senators.sql
    │   │   └── women_senators.yml
    │   └── writable
    │       ├── write_message.sql
    │       └── write_message.yaml
    └── test_query_files.py

/.gitattributes:
--------------------------------------------------------------------------------
1 | *.db filter=lfs diff=lfs merge=lfs -text
2 | 
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish Python Package
2 | 
3 | on:
4 |   release:
5 |     types: [created]
6 | 
7 | jobs:
8 |   test:
9 |     runs-on: ubuntu-latest
10 |     strategy:
11 |       matrix:
12 |         python-version: ["3.7", "3.8", "3.9", "3.10"]
13 |     steps:
14 |       - uses: actions/checkout@v3
15 |         with:
16 |           lfs: true
17 |       - name: Set up Python ${{ matrix.python-version }}
18 |         uses: actions/setup-python@v2
19 |         with:
20 |           python-version: ${{ matrix.python-version }}
21 |           cache: pip
22 |           cache-dependency-path: '**/setup.py'
23 |       - name: Install dependencies
24 |         run: |
25 |           pip install -e '.[test]'
26 |       - name: Run tests
27 |         run: |
28 |           pytest
29 | 
30 |   deploy:
31 |     runs-on: ubuntu-latest
32 |     needs: [test]
33 |     steps:
34 |       - uses: actions/checkout@v3
35 |         with:
36 |           lfs: true
37 |       - name: Set up Python
38 |         uses: actions/setup-python@v2
39 |         with:
40 |           python-version: '3.10'
41 |           cache: pip
42 |           cache-dependency-path: '**/setup.py'
43 |       - name: Install dependencies
44 |         run: |
45 |           pip install setuptools wheel twine build
46 |       - name: Publish
47 |         env:
48 |           TWINE_USERNAME: __token__
49 |           TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
50 |         run: |
51 |           python -m build
52 |           twine upload dist/*
53 | 
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | 
3 | on:
4 |   push:
5 |   workflow_dispatch:
6 | 
7 | jobs:
8 |   test:
9 |     runs-on: ubuntu-latest
10 |     strategy:
11 |       matrix:
12 |         python-version: ["3.7", "3.8", "3.9", "3.10"]
13 |     steps:
14 |       - uses: actions/checkout@v3
15 |         with:
16 |           lfs: true
17 | 
18 |       - name: Set up Python ${{ matrix.python-version }}
19 |         if: hashFiles('setup.py')
20 |         uses: actions/setup-python@v2
21 |         with:
22 |           python-version: ${{ matrix.python-version }}
23 |           cache: pip
24 |           cache-dependency-path: "**/setup.py"
25 |       - name: Install dependencies
26 |         if: hashFiles('setup.py')
27 |         run: |
28 |           pip install -e '.[test]'
29 |       - name: Run tests
30 |         if: hashFiles('setup.py')
31 |         run: |
32 |           pytest
33 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .venv
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | venv
6 | .eggs
7 | .pytest_cache
8 | *.egg-info
9 | .DS_Store
10 | .vscode
11 | dist
12 | build
13 | 
14 | tests/writable.db
15 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 | 
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND
DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | datasette-query-files = {editable = true, extras = ["test"], path = "."} 8 | 9 | [dev-packages] 10 | 11 | [requires] 12 | python_version = "3.10" 13 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "4f8663f5b5edd14da5ac48718d89e9d126994c40e63ff20e3ba5c0305a11f081" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.10" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "aiofiles": { 20 | "hashes": [ 21 | "sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937", 22 | "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59" 23 | ], 24 | "markers": "python_version >= '3.6' and python_version < '4.0'", 25 | "version": "==0.8.0" 26 | }, 27 | "anyio": { 28 | "hashes": [ 29 | "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b", 30 | "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be" 31 | ], 32 | "markers": "python_full_version >= '3.6.2'", 33 | "version": "==3.6.1" 34 | }, 35 | "asgi-csrf": { 36 | "hashes": [ 37 | "sha256:6e9d3bddaeac1a8fd33b188fe2abc8271f9085ab7be6e1a7f4d3c9df5d7f741a", 38 | "sha256:e974cffb8a4ab84a28a0088acbf7a4ecc5be4a64f08dcbe19c60dea103da01c0" 39 | ], 40 | "version": "==0.9" 41 | }, 42 | "asgiref": { 43 | "hashes": [ 44 | "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4", 45 | "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424" 46 | ], 47 | "markers": "python_version >= '3.7'", 48 | "version": "==3.5.2" 49 | }, 50 | "attrs": { 51 | "hashes": [ 52 | "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", 53 | "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" 54 | ], 55 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 56 | 
"version": "==21.4.0" 57 | }, 58 | "certifi": { 59 | "hashes": [ 60 | "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7", 61 | "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a" 62 | ], 63 | "markers": "python_version >= '3.6'", 64 | "version": "==2022.5.18.1" 65 | }, 66 | "click": { 67 | "hashes": [ 68 | "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1", 69 | "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb" 70 | ], 71 | "markers": "python_version >= '3.6'", 72 | "version": "==8.0.4" 73 | }, 74 | "click-default-group": { 75 | "hashes": [ 76 | "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904" 77 | ], 78 | "version": "==1.2.2" 79 | }, 80 | "datasette": { 81 | "hashes": [ 82 | "sha256:060ed8737c5fc6d83e6ab35b84d91f9c0e39e3caf7d794106e39b4bef9b559eb", 83 | "sha256:96978386e8e70cf42d09016236ed32b9de742e7861b98661cbb6cab7986dd3dd" 84 | ], 85 | "markers": "python_version >= '3.7'", 86 | "version": "==0.61.1" 87 | }, 88 | "datasette-query-files": { 89 | "editable": true, 90 | "extras": [ 91 | "test" 92 | ], 93 | "path": "." 94 | }, 95 | "h11": { 96 | "hashes": [ 97 | "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6", 98 | "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042" 99 | ], 100 | "markers": "python_version >= '3.6'", 101 | "version": "==0.12.0" 102 | }, 103 | "httpcore": { 104 | "hashes": [ 105 | "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6", 106 | "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b" 107 | ], 108 | "markers": "python_version >= '3.7'", 109 | "version": "==0.15.0" 110 | }, 111 | "httpx": { 112 | "hashes": [ 113 | "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b", 114 | "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef" 115 | ], 116 | "markers": "python_version >= '3.7'", 117 | "version": "==0.23.0" 118 | }, 119 | "hupper": { 120 | "hashes": [ 121 | "sha256:cd6f51b72c7587bc9bce8a65ecd025a1e95f1b03284519bfe91284d010316cd9", 122 | "sha256:f683850d62598c02faf3c7cdaaa727d8cbe3c5a2497a5737a8358386903b2601" 123 | ], 124 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 125 | "version": "==1.10.3" 126 | }, 127 | "idna": { 128 | "hashes": [ 129 | "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", 130 | "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" 131 | ], 132 | "markers": "python_version >= '3.5'", 133 | "version": "==3.3" 134 | }, 135 | "iniconfig": { 136 | "hashes": [ 137 | "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", 138 | "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" 139 | ], 140 | "version": "==1.1.1" 141 | }, 142 | "itsdangerous": { 143 | "hashes": [ 144 | "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", 145 | "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a" 146 | ], 147 | "markers": "python_version >= '3.7'", 148 | "version": "==2.1.2" 149 | }, 150 | "janus": { 151 | "hashes": [ 152 | "sha256:2596ea5482711c1ee3ef2df6c290aaf370a13c55a007826e8f7c32d696d1d00a", 153 | "sha256:df976f2cdcfb034b147a2d51edfc34ff6bfb12d4e2643d3ad0e10de058cb1612" 154 | ], 155 | "markers": "python_version >= '3.7'", 156 | "version": "==1.0.0" 157 | }, 158 | "jinja2": { 159 | "hashes": [ 160 | 
"sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8", 161 | "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7" 162 | ], 163 | "markers": "python_version >= '3.6'", 164 | "version": "==3.0.3" 165 | }, 166 | "markupsafe": { 167 | "hashes": [ 168 | "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", 169 | "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", 170 | "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", 171 | "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", 172 | "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", 173 | "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", 174 | "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", 175 | "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", 176 | "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", 177 | "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", 178 | "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", 179 | "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", 180 | "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", 181 | "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", 182 | "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", 183 | "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", 184 | "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", 185 | "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", 186 | "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", 187 | "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", 188 | "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", 189 | "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", 190 | "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", 191 | "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", 192 | "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", 193 | "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", 194 | "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", 195 | "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", 196 | "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", 197 | "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", 198 | "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", 199 | "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", 200 | "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", 201 | "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", 202 | "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", 203 | "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", 204 | "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", 205 | "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", 206 | "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", 207 | "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" 208 | ], 209 | "markers": 
"python_version >= '3.7'", 210 | "version": "==2.1.1" 211 | }, 212 | "mergedeep": { 213 | "hashes": [ 214 | "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", 215 | "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307" 216 | ], 217 | "markers": "python_version >= '3.6'", 218 | "version": "==1.3.4" 219 | }, 220 | "packaging": { 221 | "hashes": [ 222 | "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", 223 | "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" 224 | ], 225 | "markers": "python_version >= '3.6'", 226 | "version": "==21.3" 227 | }, 228 | "pint": { 229 | "hashes": [ 230 | "sha256:e1d4989ff510b378dad64f91711e7bdabe5ca78d75b06a18569ac454678c4baf" 231 | ], 232 | "markers": "python_version >= '3.8'", 233 | "version": "==0.19.2" 234 | }, 235 | "pluggy": { 236 | "hashes": [ 237 | "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", 238 | "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" 239 | ], 240 | "markers": "python_version >= '3.6'", 241 | "version": "==1.0.0" 242 | }, 243 | "py": { 244 | "hashes": [ 245 | "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", 246 | "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" 247 | ], 248 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 249 | "version": "==1.11.0" 250 | }, 251 | "pyparsing": { 252 | "hashes": [ 253 | "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", 254 | "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" 255 | ], 256 | "markers": "python_full_version >= '3.6.8'", 257 | "version": "==3.0.9" 258 | }, 259 | "pytest": { 260 | "hashes": [ 261 | "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c", 262 | "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45" 263 | ], 264 | "markers": "python_version >= '3.7'", 265 | "version": "==7.1.2" 266 | }, 267 | "pytest-asyncio": { 268 | "hashes": [ 269 | "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213", 270 | "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91", 271 | "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84" 272 | ], 273 | "markers": "python_version >= '3.7'", 274 | "version": "==0.18.3" 275 | }, 276 | "python-baseconv": { 277 | "hashes": [ 278 | "sha256:0539f8bd0464013b05ad62e0a1673f0ac9086c76b43ebf9f833053527cd9931b" 279 | ], 280 | "version": "==1.2.2" 281 | }, 282 | "python-multipart": { 283 | "hashes": [ 284 | "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43" 285 | ], 286 | "version": "==0.0.5" 287 | }, 288 | "pyyaml": { 289 | "hashes": [ 290 | "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293", 291 | "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b", 292 | "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57", 293 | "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b", 294 | "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4", 295 | "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07", 296 | "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba", 297 | "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9", 298 | "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287", 299 | 
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513", 300 | "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0", 301 | "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0", 302 | "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92", 303 | "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f", 304 | "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2", 305 | "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc", 306 | "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c", 307 | "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86", 308 | "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4", 309 | "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c", 310 | "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34", 311 | "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b", 312 | "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c", 313 | "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb", 314 | "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737", 315 | "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3", 316 | "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d", 317 | "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53", 318 | "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78", 319 | "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803", 320 | "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a", 321 | "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174", 322 | "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" 323 | ], 324 | "markers": "python_version >= '3.6'", 325 | "version": "==6.0" 326 | }, 327 | "rfc3986": { 328 | "extras": [ 329 | "idna2008" 330 | ], 331 | "hashes": [ 332 | "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", 333 | "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" 334 | ], 335 | "version": "==1.5.0" 336 | }, 337 | "six": { 338 | "hashes": [ 339 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", 340 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" 341 | ], 342 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 343 | "version": "==1.16.0" 344 | }, 345 | "sniffio": { 346 | "hashes": [ 347 | "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663", 348 | "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de" 349 | ], 350 | "markers": "python_version >= '3.5'", 351 | "version": "==1.2.0" 352 | }, 353 | "tomli": { 354 | "hashes": [ 355 | "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 356 | "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" 357 | ], 358 | "markers": "python_version >= '3.7'", 359 | "version": "==2.0.1" 360 | }, 361 | "typing-extensions": { 362 | "hashes": [ 363 | "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708", 364 | "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376" 365 | ], 366 | "markers": "python_version >= '3.7'", 367 | "version": "==4.2.0" 368 | }, 369 | "uvicorn": { 370 | "hashes": [ 371 | 
"sha256:19e2a0e96c9ac5581c01eb1a79a7d2f72bb479691acd2b8921fce48ed5b961a6", 372 | "sha256:5180f9d059611747d841a4a4c4ab675edf54c8489e97f96d0583ee90ac3bfc23" 373 | ], 374 | "markers": "python_version >= '3.7'", 375 | "version": "==0.17.6" 376 | } 377 | }, 378 | "develop": {} 379 | } 380 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # datasette-query-files 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/datasette-query-files.svg)](https://pypi.org/project/datasette-query-files/) 4 | [![Changelog](https://img.shields.io/github/v/release/eyeseast/datasette-query-files?include_prereleases&label=changelog)](https://github.com/eyeseast/datasette-query-files/releases) 5 | [![Tests](https://github.com/eyeseast/datasette-query-files/workflows/Test/badge.svg)](https://github.com/eyeseast/datasette-query-files/actions?query=workflow%3ATest) 6 | [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/eyeseast/datasette-query-files/blob/main/LICENSE) 7 | 8 | Write Datasette canned queries as plain SQL files. 9 | 10 | ## Installation 11 | 12 | Install this plugin in the same environment as Datasette. 13 | 14 | datasette install datasette-query-files 15 | 16 | Or using `pip` or `pipenv`: 17 | 18 | pip install datasette-query-files 19 | pipenv install datasette-query-files 20 | 21 | ## Usage 22 | 23 | This plugin will look for [canned queries](https://docs.datasette.io/en/stable/sql_queries.html#canned-queries) in the filesystem, in addition any defined in metadata. 24 | 25 | Let's say you're working in a directory called `project-directory`, with a database file called `my-project.db`. Start by creating a `queries` directory with a `my-project` directory inside it. Any SQL file inside that `my-project` folder will become a canned query that can be run on the `my-project` database. If you have a `query-name.sql` file and a `query-name.json` (or `query-name.yml`) file in the same directory, the JSON file will be used as query metadata. 26 | 27 | ``` 28 | project-directory/ 29 | my-project.db 30 | queries/ 31 | my-project/ 32 | query-name.sql # a query 33 | query-name.yml # query metadata 34 | ``` 35 | 36 | ## Development 37 | 38 | To set up this plugin locally, first checkout the code. 
Then create a new virtual environment:
39 | 
40 |     cd datasette-query-files
41 |     python3 -m venv venv
42 |     source venv/bin/activate
43 | 
44 | Now install the dependencies and test dependencies:
45 | 
46 |     pip install -e '.[test]'
47 | 
48 | To run the tests:
49 | 
50 |     pytest
51 | 
--------------------------------------------------------------------------------
/datasette_query_files/__init__.py:
--------------------------------------------------------------------------------
1 | import json
2 | import yaml
3 | import aiofiles
4 | import aiofiles.os
5 | from pathlib import Path
6 | from datasette import hookimpl
7 | 
8 | PLUGIN_NAME = "datasette-query-files"
9 | 
10 | 
11 | @hookimpl
12 | def canned_queries(datasette, database):
13 |     config = datasette.plugin_config(PLUGIN_NAME) or {}
14 |     query_directory = Path(config.get("query_directory", "queries")).resolve()
15 |     db_dir = query_directory / database
16 | 
17 |     async def inner():
18 |         queries = {}
19 |         if not await aiofiles.os.path.isdir(db_dir):
20 |             return queries
21 | 
22 |         for path in db_dir.iterdir():
23 |             if path.suffix == ".sql":
24 |                 queries[path.stem] = await get_canned_query(path, database)
25 | 
26 |         return queries
27 | 
28 |     return inner
29 | 
30 | 
31 | async def get_canned_query(path, database):
32 |     async with aiofiles.open(path) as f:
33 |         sql = await f.read()
34 | 
35 |     # todo look for metadata
36 |     metadata_paths = [
37 |         path.parent / (path.stem + ext) for ext in [".json", ".yml", ".yaml"]
38 |     ]
39 |     metadata = await get_metadata(*metadata_paths)
40 |     metadata["sql"] = sql
41 |     return metadata
42 | 
43 | 
44 | async def get_metadata(*paths):
45 |     metadata = {}
46 |     content = format = None
47 | 
48 |     for path in paths:
49 |         if await aiofiles.os.path.isfile(path):
50 |             async with aiofiles.open(path) as f:
51 |                 content = await f.read()
52 |                 format = path.suffix
53 |             break
54 | 
55 |     if content:
56 |         metadata = parse(content, format)
57 | 
58 |     return metadata
59 | 
60 | 
61 | def parse(content, format):
62 |     if format == ".json":
63 |         return json.loads(content)
64 | 
65 |     if format in {".yaml", ".yml"}:
66 |         return yaml.load(content, Loader=yaml.SafeLoader)
67 | 
68 |     return {}
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | asyncio_mode = strict
3 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | import os
3 | 
4 | VERSION = "0.1.1"
5 | 
6 | 
7 | def get_long_description():
8 |     with open(
9 |         os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md"),
10 |         encoding="utf8",
11 |     ) as fp:
12 |         return fp.read()
13 | 
14 | 
15 | setup(
16 |     name="datasette-query-files",
17 |     description="Write Datasette canned queries as plain SQL files",
18 |     long_description=get_long_description(),
19 |     long_description_content_type="text/markdown",
20 |     author="Chris Amico",
21 |     url="https://github.com/eyeseast/datasette-query-files",
22 |     project_urls={
23 |         "Issues": "https://github.com/eyeseast/datasette-query-files/issues",
24 |         "CI": "https://github.com/eyeseast/datasette-query-files/actions",
25 |         "Changelog": "https://github.com/eyeseast/datasette-query-files/releases",
26 |     },
27 |     license="Apache License, Version 2.0",
28 |     classifiers=[
29 |         "Framework :: Datasette",
30 |         "License :: OSI Approved :: Apache Software License",
31 |     ],
32 |     version=VERSION,
33 |     packages=["datasette_query_files"],
34 |     entry_points={"datasette": ["query_files = datasette_query_files"]},
35 |     install_requires=["datasette"],
36 |     extras_require={"test": ["pytest", "pytest-asyncio"]},
37 |     python_requires=">=3.7",
38 | )
39 | 
--------------------------------------------------------------------------------
/tests/legislators.db:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:f2711a7aa1b1d03c96fb67561710fe665984eec359faa8a279b89d1f287b325a
3 | size 5468160
4 | 
--------------------------------------------------------------------------------
/tests/metadata.yml:
--------------------------------------------------------------------------------
1 | # standard metadata, for comparison
2 | 
3 | databases:
4 |   legislators:
5 |     queries:
6 |       by_gender:
7 |         title: Legislators by gender
8 |         sql: |
9 |           select
10 |             bio_gender as gender,
11 |             count(*) as count
12 |           from
13 |             legislators
14 |           group by
15 |             bio_gender
16 | 
--------------------------------------------------------------------------------
/tests/queries/legislators/presidents.json:
--------------------------------------------------------------------------------
1 | {"title": "All the presidents"}
--------------------------------------------------------------------------------
/tests/queries/legislators/presidents.sql:
--------------------------------------------------------------------------------
1 | select
2 |   *
3 | from
4 |   executives
5 | where
6 |   id in (
7 |     select
8 |       executive_id
9 |     from
10 |       executive_terms
11 |     where
12 |       type = 'prez'
13 |   )
--------------------------------------------------------------------------------
/tests/queries/legislators/women_senators.sql:
--------------------------------------------------------------------------------
1 | select
2 |   legislators.id,
3 |   legislators.name,
4 |   legislator_terms.party,
5 |   count(*) as terms
6 | from
7 |   legislators
8 |   inner join legislator_terms on legislators.id = legislator_terms.legislator_id
9 | where
10 |   legislator_terms.type = 'sen'
11 |   and legislators.bio_gender = 'F'
12 | group by
13 |   legislators.id
14 | order by
15 |   terms desc
--------------------------------------------------------------------------------
/tests/queries/legislators/women_senators.yml:
--------------------------------------------------------------------------------
1 | title: Women in the Senate
2 | 
--------------------------------------------------------------------------------
/tests/queries/writable/write_message.sql:
--------------------------------------------------------------------------------
1 | INSERT INTO
2 |   messages (user_id, message, datetime)
3 | VALUES
4 |   (:_actor_id, :message, :_now_datetime_utc)
--------------------------------------------------------------------------------
/tests/queries/writable/write_message.yaml:
--------------------------------------------------------------------------------
1 | write: true
2 | allow:
3 |   id: [root]
4 | on_success_message: "log message inserted"
5 | 
--------------------------------------------------------------------------------
/tests/test_query_files.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import pytest
3 | import sqlite3
4 | import httpx
5 | from datasette.app import Datasette
6 | from datasette.app import DatasetteClient
7 | 
8 | PLUGIN_NAME = "datasette-query-files"
9 | TESTS = pathlib.Path(__file__).parent
10 | LEGISLATORS =
TESTS / "legislators.db" 11 | QUERIES = TESTS / "queries" 12 | 13 | SQL_FILES = { 14 | "presidents": QUERIES / "legislators" / "presidents.sql", 15 | "women_senators": QUERIES / "legislators" / "women_senators.sql", 16 | } 17 | 18 | METADATA = { 19 | "presidents": {"title": "All the presidents"}, 20 | "women_senators": {"title": "Women in the Senate"}, 21 | } 22 | 23 | CREATE_WRITE_TABLE = """ 24 | create table messages (user_id text, message text, datetime text); 25 | """ 26 | 27 | 28 | @pytest.fixture 29 | def ds(tmp_path): 30 | WRITABLE = tmp_path / "writable.db" 31 | 32 | writable = sqlite3.connect(WRITABLE) 33 | writable.executescript(CREATE_WRITE_TABLE) 34 | 35 | yield Datasette( 36 | [LEGISLATORS, WRITABLE], 37 | metadata={"plugins": {PLUGIN_NAME: {"query_directory": QUERIES}}}, 38 | ) 39 | 40 | WRITABLE.unlink() 41 | 42 | 43 | @pytest.mark.asyncio 44 | async def test_plugin_is_installed(): 45 | datasette = Datasette(memory=True) 46 | response = await datasette.client.get("/-/plugins.json") 47 | assert response.status_code == 200 48 | installed_plugins = {p["name"] for p in response.json()} 49 | assert PLUGIN_NAME in installed_plugins 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_query_exists(ds): 54 | presidents_query = SQL_FILES["presidents"] 55 | assert presidents_query.exists() # make sure it's not broken 56 | 57 | url = ds.urls.database("legislators", format="json") 58 | resp = await ds.client.get(url) 59 | data = resp.json() 60 | queries = data["queries"] # let this error if it errors 61 | 62 | assert len(queries) > 0 63 | assert presidents_query.stem in [q["name"] for q in queries] 64 | 65 | 66 | @pytest.mark.asyncio 67 | async def test_query_results(ds): 68 | url = ds.urls.query("legislators", "presidents", format="json") 69 | resp = await ds.client.get(url) 70 | 71 | assert resp.status_code == 200 72 | 73 | data = resp.json() 74 | 75 | # 45 presidents, counting Grover Cleveland once 76 | assert len(data["rows"]) == 45 77 | 78 | 79 | @pytest.mark.asyncio 80 | async def test_query_metadata(ds): 81 | url = ds.urls.database("legislators", format="json") 82 | 83 | resp = await ds.client.get(url) 84 | data = resp.json() 85 | queries = {q["name"]: q for q in data["queries"]} 86 | 87 | for name, path in SQL_FILES.items(): 88 | metadata = METADATA[name] 89 | query = queries[name] 90 | 91 | assert metadata["title"] == query["title"] 92 | 93 | 94 | @pytest.mark.asyncio 95 | async def test_write_message(ds): 96 | assert ds._root_token is not None 97 | token = ds._root_token 98 | async with httpx.AsyncClient( 99 | app=ds.app(), 100 | base_url="http://localhost", 101 | cookies={"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")}, 102 | ) as client: 103 | 104 | url = ds.urls.query("writable", "write_message") 105 | r1 = await client.get(url) 106 | assert 200 == r1.status_code 107 | 108 | csrftoken = r1.cookies["ds_csrftoken"] 109 | r2 = await client.post( 110 | url, 111 | data={ 112 | "message": "Hello, world!", 113 | "csrftoken": csrftoken, 114 | }, 115 | headers={"Accept": "application/json"}, 116 | ) 117 | 118 | assert 200 == r2.status_code 119 | 120 | data = r2.json() 121 | 122 | assert data["ok"] 123 | 124 | 125 | @pytest.mark.asyncio 126 | async def _test_write_message(ds): 127 | cookie = ds.sign({"a": {"id": "test"}}, "actor") 128 | url = ds.urls.query("writable", "write_message") 129 | 130 | resp = await ds.client.post( 131 | url, 132 | data={"message": "Hello, world!"}, 133 | headers={"Accept": "application/json"}, 134 | cookies={"ds_actor": cookie}, 135 | ) 136 | 137 
| assert 200 == resp.status_code 138 | --------------------------------------------------------------------------------
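
The layout described in the README's Usage section can be exercised end to end with a few lines of Python. The sketch below is illustrative only: the `example-project` directory, the `demo` database and the `row_count` query are invented names, and it assumes Datasette and this plugin are installed in the current environment.

```python
import asyncio
import sqlite3
from pathlib import Path

from datasette.app import Datasette


async def main():
    root = Path("example-project")
    (root / "queries" / "demo").mkdir(parents=True, exist_ok=True)

    # An empty SQLite database whose name matches the queries/demo/ folder.
    sqlite3.connect(root / "demo.db").close()

    # The .sql file becomes a canned query named after its stem ("row_count").
    (root / "queries" / "demo" / "row_count.sql").write_text(
        "select count(*) as n from sqlite_master"
    )
    # Optional metadata lives alongside it, with the same stem.
    (root / "queries" / "demo" / "row_count.yml").write_text(
        "title: Count schema rows\n"
    )

    ds = Datasette(
        [root / "demo.db"],
        metadata={
            "plugins": {
                "datasette-query-files": {"query_directory": str(root / "queries")}
            }
        },
    )
    # Newer Datasette versions may also need: await ds.invoke_startup()
    response = await ds.client.get("/demo.json")
    names = [q["name"] for q in response.json()["queries"]]
    print(names)  # expected to include "row_count"


if __name__ == "__main__":
    asyncio.run(main())
```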
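Metadata resolution in `get_canned_query` checks for a sibling `.json`, `.yml`, then `.yaml` file and stops at the first one it finds, so a JSON file wins over a YAML file with the same stem. A second, smaller sketch, reusing the same invented paths as above, shows that precedence by calling the plugin's helpers directly.

```python
import asyncio
from pathlib import Path

from datasette_query_files import get_canned_query


async def main():
    qdir = Path("example-project") / "queries" / "demo"
    qdir.mkdir(parents=True, exist_ok=True)

    (qdir / "row_count.sql").write_text("select count(*) as n from sqlite_master")
    (qdir / "row_count.json").write_text('{"title": "From JSON"}')
    (qdir / "row_count.yml").write_text("title: From YAML\n")

    # get_canned_query() reads the SQL and merges the first matching metadata file.
    query = await get_canned_query(qdir / "row_count.sql", "demo")
    print(query["title"])  # "From JSON" -- .json is checked before .yml and .yaml
    print(query["sql"])


if __name__ == "__main__":
    asyncio.run(main())
```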