├── .github
│   └── workflows
│       ├── publish.yml
│       └── test.yml
├── .gitignore
├── LICENSE
├── README.md
├── datasette-edit-schema.png
├── datasette_edit_schema
│   ├── __init__.py
│   ├── static
│   │   ├── draggable.1.0.0-beta.11.bundle.js
│   │   └── draggable.1.0.0-beta.11.bundle.min.js
│   ├── templates
│   │   ├── edit_schema_create_table.html
│   │   ├── edit_schema_database.html
│   │   ├── edit_schema_index.html
│   │   └── edit_schema_table.html
│   └── utils.py
├── pyproject.toml
├── tests
│   ├── __init__.py
│   ├── conftest.py
│   └── test_edit_schema.py
└── update-screenshot.sh
/.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | test: 9 | runs-on: ubuntu-latest 10 | strategy: 11 | matrix: 12 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Set up Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v5 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | cache: pip 20 | cache-dependency-path: pyproject.toml 21 | - name: Install dependencies 22 | run: | 23 | pip install -e '.[test]' 24 | - name: Run tests 25 | run: | 26 | pytest 27 | deploy: 28 | runs-on: ubuntu-latest 29 | environment: release 30 | permissions: 31 | id-token: write 32 | needs: [test] 33 | steps: 34 | - uses: actions/checkout@v4 35 | - name: Set up Python 36 | uses: actions/setup-python@v5 37 | with: 38 | python-version: '3.12' 39 | cache: pip 40 | cache-dependency-path: pyproject.toml 41 | - name: Install dependencies 42 | run: | 43 | pip install setuptools wheel build 44 | - name: Build 45 | run: | 46 | python -m build 47 | - name: Publish 48 | uses: pypa/gh-action-pypi-publish@release/v1 49 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: [push, pull_request] 4 | 5 | permissions: 6 | contents: read 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | cache: pip 21 | cache-dependency-path: pyproject.toml 22 | - name: Install dependencies 23 | run: | 24 | pip install -e '.[test]' 25 | - name: Run tests 26 | run: | 27 | pytest 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | venv 6 | .eggs 7 | .pytest_cache 8 | *.egg-info 9 | .DS_Store 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # datasette-edit-schema 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/datasette-edit-schema.svg)](https://pypi.org/project/datasette-edit-schema/) 4 | [![Changelog](https://img.shields.io/github/v/release/simonw/datasette-edit-schema?include_prereleases&label=changelog)](https://github.com/simonw/datasette-edit-schema/releases) 5 | [![Tests](https://github.com/simonw/datasette-edit-schema/workflows/Test/badge.svg)](https://github.com/simonw/datasette-edit-schema/actions?query=workflow%3ATest) 6 | [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette-edit-schema/blob/master/LICENSE) 7 | 8 | Datasette plugin for modifying table schemas 9 | 10 | > :warning: The latest alpha release depends on Datasette 1.0a13. Use [version 0.7.1](https://github.com/simonw/datasette-edit-schema/blob/0.7.1/README.md) with older releases of Datasette. 11 | 12 | ## Features 13 | 14 | * Add new columns to a table 15 | * Rename columns in a table 16 | * Modify the type of columns in a table 17 | * Re-order the columns in a table 18 | * Rename a table 19 | * Delete a table 20 | * Change the primary key of a table to another column containing unique values 21 | * Update the foreign key constraints on a table 22 | * Add an index (or unique index) to a column on a table 23 | * Drop an index from a table 24 | 25 | ## Installation 26 | 27 | Install this plugin in the same environment as Datasette. 28 | ```bash 29 | pip install datasette-edit-schema 30 | ``` 31 | ## Usage 32 | 33 | Navigate to `/-/edit-schema/dbname/tablename` on your Datasette instance to edit a specific table. 34 | 35 | Use `/-/edit-schema/dbname` to create a new table in a specific database. 36 | 37 | By default only [the root actor](https://datasette.readthedocs.io/en/stable/authentication.html#using-the-root-actor) can access the page - so you'll need to run Datasette with the `--root` option and click on the link shown in the terminal to sign in and access the page. 38 | 39 | ## Permissions 40 | 41 | The `edit-schema` permission provides access to all functionality. 42 | 43 | You can use permission plugins such as [datasette-permissions-sql](https://github.com/simonw/datasette-permissions-sql) to grant additional access to the write interface. 44 | 45 | These permission checks will call the `permission_allowed()` plugin hook with three arguments: 46 | 47 | - `action` will be the string `"edit-schema"` 48 | - `actor` will be the currently authenticated actor - usually a dictionary 49 | - `resource` will be the string name of the database 50 | 51 | You can instead use more finely-grained permissions from the default Datasette permissions collection: 52 | 53 | - `create-table` allows users to create a new table. The `resource` will be the name of the database. 54 | - `drop-table` allows users to drop a table. The `resource` will be a tuple of `(database_name, table_name)`. 
55 | - `alter-table` allows users to alter a table. The `resource` will be a tuple of `(database_name, table_name)`. 56 | 57 | To rename a table a user must have both `drop-table` permission for that table and `create-table` permission for that database. 58 | 59 | For example, to configure Datasette to allow the user with ID `pelican` to create, alter and drop tables in the `marketing` database and to alter just the `notes` table in the `sales` database, you could use the following configuration: 60 | 61 | ```yaml 62 | databases: 63 | marketing: 64 | permissions: 65 | create-table: 66 | id: pelican 67 | drop-table: 68 | id: pelican 69 | alter-table: 70 | id: pelican 71 | sales: 72 | tables: 73 | notes: 74 | permissions: 75 | alter-table: 76 | id: pelican 77 | ``` 78 | 79 | ## Events 80 | 81 | This plugin fires `create-table`, `alter-table` and `drop-table` events when tables are modified, using the [Datasette Events](https://docs.datasette.io/en/latest/events.html) system introduced in [Datasette 1.0a8](https://docs.datasette.io/en/latest/changelog.html#a8-2024-02-07). 82 | 83 | ## Screenshot 84 | 85 | ![datasette-edit-schema interface](https://raw.githubusercontent.com/simonw/datasette-edit-schema/main/datasette-edit-schema.png) 86 | 87 | ## Development 88 | 89 | To set up this plugin locally, first checkout the code. Then create a new virtual environment: 90 | ```bash 91 | cd datasette-edit-schema 92 | python3 -mvenv venv 93 | source venv/bin/activate 94 | ``` 95 | Or if you are using `pipenv`: 96 | ```bash 97 | pipenv shell 98 | ``` 99 | Now install the dependencies and test dependencies: 100 | ```bash 101 | pip install -e '.[test]' 102 | ``` 103 | To run the tests: 104 | ```bash 105 | pytest 106 | ``` -------------------------------------------------------------------------------- /datasette-edit-schema.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simonw/datasette-edit-schema/e393cda78f126fb80477690626754dd94a44d3a7/datasette-edit-schema.png -------------------------------------------------------------------------------- /datasette_edit_schema/__init__.py: -------------------------------------------------------------------------------- 1 | from datasette import hookimpl 2 | from datasette.events import CreateTableEvent, AlterTableEvent, DropTableEvent 3 | from datasette.utils.asgi import Response, NotFound, Forbidden 4 | from datasette.utils import sqlite3, tilde_decode, tilde_encode 5 | from urllib.parse import quote_plus, unquote_plus 6 | import sqlite_utils 7 | import textwrap 8 | from .utils import ( 9 | examples_for_columns, 10 | get_primary_keys, 11 | potential_foreign_keys, 12 | potential_primary_keys, 13 | ) 14 | 15 | try: 16 | from datasette import events 17 | except ImportError: # Pre Datasette 1.0a8 18 | events = None 19 | 20 | # Don't attempt to detect foreign keys on tables larger than this: 21 | FOREIGN_KEY_DETECTION_LIMIT = 10_000 22 | 23 | 24 | @hookimpl 25 | def permission_allowed(actor, action, resource): 26 | if ( 27 | action == "edit-schema" 28 | and actor 29 | and actor.get("id") == "root" 30 | and resource != "_internal" 31 | ): 32 | return True 33 | 34 | 35 | @hookimpl 36 | def table_actions(datasette, actor, database, table): 37 | async def inner(): 38 | if not await can_alter_table(datasette, actor, database, table): 39 | return [] 40 | return [ 41 | { 42 | "href": datasette.urls.path( 43 | "/-/edit-schema/{}/{}".format(database, tilde_encode(table)) 44 | ), 45 | "label": "Edit table schema", 46 |
"description": "Rename the table, add and remove columns...", 47 | } 48 | ] 49 | 50 | return inner 51 | 52 | 53 | async def can_create_table(datasette, actor, database): 54 | if await datasette.permission_allowed( 55 | actor, "edit-schema", resource=database, default=False 56 | ): 57 | return True 58 | # Or maybe they have create-table 59 | if await datasette.permission_allowed( 60 | actor, "create-table", resource=database, default=False 61 | ): 62 | return True 63 | return False 64 | 65 | 66 | async def can_alter_table(datasette, actor, database, table): 67 | if await datasette.permission_allowed( 68 | actor, "edit-schema", resource=database, default=False 69 | ): 70 | return True 71 | if await datasette.permission_allowed( 72 | actor, "alter-table", resource=(database, table), default=False 73 | ): 74 | return True 75 | return False 76 | 77 | 78 | async def can_rename_table(datasette, actor, database, table): 79 | if not await can_drop_table(datasette, actor, database, table): 80 | return False 81 | if not await can_create_table(datasette, actor, database): 82 | return False 83 | return True 84 | 85 | 86 | async def can_drop_table(datasette, actor, database, table): 87 | if await datasette.permission_allowed( 88 | actor, "edit-schema", resource=database, default=False 89 | ): 90 | return True 91 | # Or maybe they have drop-table 92 | if await datasette.permission_allowed( 93 | actor, "drop-table", resource=(database, table), default=False 94 | ): 95 | return True 96 | return False 97 | 98 | 99 | @hookimpl 100 | def database_actions(datasette, actor, database): 101 | async def inner(): 102 | if not await can_create_table(datasette, actor, database): 103 | return [] 104 | return [ 105 | { 106 | "href": datasette.urls.path( 107 | "/-/edit-schema/{}/-/create".format(database) 108 | ), 109 | "label": "Create a table", 110 | "description": "Define a new table with specified columns", 111 | } 112 | ] 113 | 114 | return inner 115 | 116 | 117 | @hookimpl 118 | def register_routes(): 119 | return [ 120 | (r"^/-/edit-schema$", edit_schema_index), 121 | (r"^/-/edit-schema/(?P[^/]+)$", edit_schema_database), 122 | (r"^/-/edit-schema/(?P[^/]+)/-/create$", edit_schema_create_table), 123 | (r"^/-/edit-schema/(?P[^/]+)/(?P[^/]+)$", edit_schema_table), 124 | ] 125 | 126 | 127 | TYPES = { 128 | str: "TEXT", 129 | float: "REAL", 130 | int: "INTEGER", 131 | bytes: "BLOB", 132 | } 133 | REV_TYPES = {v: k for k, v in TYPES.items()} 134 | TYPE_NAMES = { 135 | "TEXT": "Text", 136 | "REAL": "Floating point", 137 | "INTEGER": "Integer", 138 | "BLOB": "Binary data", 139 | } 140 | 141 | 142 | def get_databases(datasette): 143 | return [ 144 | db 145 | for db in datasette.databases.values() 146 | if db.is_mutable and db.name != "_internal" 147 | ] 148 | 149 | 150 | async def check_permissions(datasette, request, database): 151 | if not await datasette.permission_allowed( 152 | request.actor, "edit-schema", resource=database, default=False 153 | ): 154 | raise Forbidden("Permission denied for edit-schema") 155 | 156 | 157 | async def edit_schema_index(datasette, request): 158 | database_names = [db.name for db in get_databases(datasette)] 159 | # Check permissions for each one 160 | allowed_databases = [ 161 | name 162 | for name in database_names 163 | if await datasette.permission_allowed( 164 | request.actor, "edit-schema", resource=name, default=False 165 | ) 166 | ] 167 | if not allowed_databases: 168 | raise Forbidden("Permission denied for edit-schema") 169 | 170 | if len(allowed_databases) == 1: 171 | return 
Response.redirect( 172 | "/-/edit-schema/{}".format(quote_plus(allowed_databases[0])) 173 | ) 174 | 175 | return Response.html( 176 | await datasette.render_template( 177 | "edit_schema_index.html", 178 | { 179 | "databases": allowed_databases, 180 | }, 181 | request=request, 182 | ) 183 | ) 184 | 185 | 186 | async def edit_schema_database(request, datasette): 187 | databases = get_databases(datasette) 188 | database_name = request.url_vars["database"] 189 | await check_permissions(datasette, request, database_name) 190 | just_these_tables = set(request.args.getlist("table")) 191 | try: 192 | database = [db for db in databases if db.name == database_name][0] 193 | except IndexError: 194 | raise NotFound("Database not found") 195 | tables = [] 196 | hidden_tables = set(await database.hidden_table_names()) 197 | for table_name in await database.table_names(): 198 | if just_these_tables and table_name not in just_these_tables: 199 | continue 200 | if table_name in hidden_tables: 201 | continue 202 | 203 | def get_columns(conn): 204 | return [ 205 | {"name": column, "type": dtype} 206 | for column, dtype in sqlite_utils.Database(conn)[ 207 | table_name 208 | ].columns_dict.items() 209 | ] 210 | 211 | columns = await database.execute_write_fn(get_columns, block=True) 212 | tables.append({"name": table_name, "columns": columns}) 213 | return Response.html( 214 | await datasette.render_template( 215 | "edit_schema_database.html", 216 | { 217 | "database": database, 218 | "tables": tables, 219 | "tilde_encode": tilde_encode, 220 | }, 221 | request=request, 222 | ) 223 | ) 224 | 225 | 226 | async def edit_schema_create_table(request, datasette): 227 | database_name = request.url_vars["database"] 228 | if not await can_create_table(datasette, request.actor, database_name): 229 | raise Forbidden("Permission denied for create-table") 230 | try: 231 | db = datasette.get_database(database_name) 232 | except KeyError: 233 | raise NotFound("Database not found") 234 | 235 | if request.method == "POST": 236 | formdata = await request.post_vars() 237 | table_name = formdata.get("table_name") or "" 238 | columns = {} 239 | for key, value in formdata.items(): 240 | if key.startswith("column-name"): 241 | idx = key.split(".")[-1] 242 | columns[idx] = {"name": value} 243 | elif key.startswith("column-type"): 244 | idx = key.split(".")[-1] 245 | columns[idx]["type"] = value 246 | elif key.startswith("column-sort"): 247 | idx = key.split(".")[-1] 248 | columns[idx]["sort"] = int(value) 249 | 250 | # Sort columns based on sort order 251 | sorted_columns = sorted(columns.values(), key=lambda x: x["sort"]) 252 | 253 | # Dictionary to use with .create() 254 | primary_key_name = formdata["primary_key_name"].strip() 255 | create = {primary_key_name: REV_TYPES[formdata["primary_key_type"]]} 256 | 257 | for column in sorted_columns: 258 | if column["name"].strip(): 259 | create[column["name"].strip()] = REV_TYPES[column["type"]] 260 | 261 | def create_the_table(conn): 262 | db = sqlite_utils.Database(conn) 263 | if not table_name.strip(): 264 | return None, "Table name is required" 265 | if db[table_name].exists(): 266 | return None, "Table already exists" 267 | try: 268 | db[table_name].create( 269 | create, pk=primary_key_name, not_null=(primary_key_name,) 270 | ) 271 | return db[table_name].schema, None 272 | except Exception as e: 273 | return None, str(e) 274 | 275 | schema, error = await db.execute_write_fn(create_the_table, block=True) 276 | 277 | if error: 278 | datasette.add_message(request, str(error), 
datasette.ERROR) 279 | path = request.path 280 | else: 281 | datasette.add_message(request, "Table has been created") 282 | path = datasette.urls.table(database_name, table_name) 283 | await datasette.track_event( 284 | CreateTableEvent( 285 | actor=request.actor, 286 | database=database_name, 287 | table=table_name, 288 | schema=schema, 289 | ) 290 | ) 291 | 292 | return Response.redirect(path) 293 | 294 | return Response.html( 295 | await datasette.render_template( 296 | "edit_schema_create_table.html", 297 | { 298 | "database": db, 299 | "columns": [{"name": "Column {}".format(i)} for i in range(1, 10)], 300 | "types": [ 301 | {"name": TYPE_NAMES[value], "value": value} 302 | for value in TYPES.values() 303 | ], 304 | }, 305 | request=request, 306 | ) 307 | ) 308 | 309 | 310 | async def edit_schema_table(request, datasette): 311 | table = tilde_decode(request.url_vars["table"]) 312 | databases = get_databases(datasette) 313 | database_name = request.url_vars["database"] 314 | 315 | if not await can_alter_table(datasette, request.actor, database_name, table): 316 | raise Forbidden("Permission denied for alter-table") 317 | 318 | try: 319 | database = [db for db in databases if db.name == database_name][0] 320 | except IndexError: 321 | raise NotFound("Database not found") 322 | if not await database.table_exists(table): 323 | raise NotFound("Table not found") 324 | 325 | if request.method == "POST": 326 | 327 | def get_schema(conn): 328 | table_obj = sqlite_utils.Database(conn)[table] 329 | if not table_obj.exists(): 330 | return None 331 | return table_obj.schema 332 | 333 | before_schema = await database.execute_fn(get_schema) 334 | 335 | async def track_analytics(): 336 | after_schema = await database.execute_fn(get_schema) 337 | # Don't track drop tables, which happen when after_schema is None 338 | if after_schema is not None and after_schema != before_schema: 339 | await datasette.track_event( 340 | AlterTableEvent( 341 | actor=request.actor, 342 | database=database_name, 343 | table=table, 344 | before_schema=before_schema, 345 | after_schema=after_schema, 346 | ) 347 | ) 348 | 349 | formdata = await request.post_vars() 350 | if formdata.get("action") == "update_columns": 351 | types = {} 352 | rename = {} 353 | drop = set() 354 | order_pairs = [] 355 | 356 | def get_columns(conn): 357 | return [ 358 | {"name": column, "type": dtype} 359 | for column, dtype in sqlite_utils.Database(conn)[ 360 | table 361 | ].columns_dict.items() 362 | ] 363 | 364 | existing_columns = await database.execute_fn(get_columns) 365 | 366 | for column_details in existing_columns: 367 | column = column_details["name"] 368 | new_name = formdata.get("name.{}".format(column)) 369 | if new_name and new_name != column: 370 | rename[column] = new_name 371 | if formdata.get("delete.{}".format(column)): 372 | drop.add(column) 373 | types[column] = ( 374 | REV_TYPES.get(formdata.get("type.{}".format(column))) 375 | or column_details["type"] 376 | ) 377 | order_pairs.append((column, formdata.get("sort.{}".format(column), 0))) 378 | 379 | order_pairs.sort(key=lambda p: int(p[1])) 380 | 381 | def transform_the_table(conn): 382 | # Run this in a transaction: 383 | with conn: 384 | # We have to read all the views first, because we need to drop and recreate them 385 | db = sqlite_utils.Database(conn) 386 | views = { 387 | v.name: v.schema for v in db.views if table.lower() in v.schema 388 | } 389 | for view in views.keys(): 390 | db[view].drop() 391 | db[table].transform( 392 | types=types, 393 | rename=rename, 394 | 
drop=drop, 395 | column_order=[p[0] for p in order_pairs], 396 | ) 397 | # Now recreate the views 398 | for schema in views.values(): 399 | db.execute(schema) 400 | 401 | await database.execute_write_fn(transform_the_table, block=True) 402 | 403 | datasette.add_message(request, "Changes to table have been saved") 404 | await track_analytics() 405 | return Response.redirect(request.path) 406 | 407 | if formdata.get("action") == "update_foreign_keys": 408 | response = await update_foreign_keys( 409 | request, datasette, database, table, formdata 410 | ) 411 | elif formdata.get("action") == "update_primary_key": 412 | response = await update_primary_key( 413 | request, datasette, database, table, formdata 414 | ) 415 | elif "drop_table" in formdata: 416 | response = await drop_table(request, datasette, database, table) 417 | elif "add_column" in formdata: 418 | response = await add_column(request, datasette, database, table, formdata) 419 | elif "rename_table" in formdata: 420 | response = await rename_table(request, datasette, database, table, formdata) 421 | elif "add_index" in formdata: 422 | column = formdata.get("add_index_column") or "" 423 | unique = formdata.get("add_index_unique") 424 | response = await add_index( 425 | request, datasette, database, table, column, unique 426 | ) 427 | elif any(key.startswith("drop_index_") for key in formdata.keys()): 428 | response = await drop_index(request, datasette, database, table, formdata) 429 | else: 430 | response = Response.html("Unknown operation", status=400) 431 | await track_analytics() 432 | return response 433 | 434 | def get_columns_and_schema_and_fks_and_pks_and_indexes(conn): 435 | db = sqlite_utils.Database(conn) 436 | t = db[table] 437 | pks = set(t.pks) 438 | columns = [ 439 | {"name": column, "type": dtype, "is_pk": column in pks} 440 | for column, dtype in t.columns_dict.items() 441 | ] 442 | # Include the index declarations in the schema as well 443 | schema = db.execute( 444 | textwrap.dedent( 445 | """ 446 | select group_concat(sql, '; 447 | ') from sqlite_master where tbl_name = ? 
448 | order by type desc 449 | """ 450 | ), 451 | [table], 452 | ).fetchone()[0] 453 | return columns, schema, t.foreign_keys, t.pks, t.indexes 454 | 455 | columns, schema, foreign_keys, pks, indexes = await database.execute_fn( 456 | get_columns_and_schema_and_fks_and_pks_and_indexes 457 | ) 458 | foreign_keys_by_column = {} 459 | for fk in foreign_keys: 460 | foreign_keys_by_column.setdefault(fk.column, []).append(fk) 461 | 462 | # Load example data for the columns - truncated first five non-blank values 463 | column_examples = await database.execute_fn( 464 | lambda conn: examples_for_columns(conn, table) 465 | ) 466 | 467 | columns_display = [ 468 | { 469 | "name": c["name"], 470 | "type": TYPES[c["type"]], 471 | "examples": column_examples.get(c["name"]) or {}, 472 | } 473 | for c in columns 474 | ] 475 | 476 | # To detect potential foreign keys we need (table, column) for the 477 | # primary keys on every other table 478 | other_primary_keys = [ 479 | pair for pair in await database.execute_fn(get_primary_keys) if pair[0] != table 480 | ] 481 | integer_primary_keys = [ 482 | (pair[0], pair[1]) for pair in other_primary_keys if pair[2] is int 483 | ] 484 | string_primary_keys = [ 485 | (pair[0], pair[1]) for pair in other_primary_keys if pair[2] is str 486 | ] 487 | 488 | all_columns_to_manage_foreign_keys = [ 489 | { 490 | "name": column["name"], 491 | "foreign_key": ( 492 | foreign_keys_by_column.get(column["name"])[0] 493 | if foreign_keys_by_column.get(column["name"]) 494 | else None 495 | ), 496 | "suggestions": [], 497 | "options": ( 498 | integer_primary_keys if column["type"] is int else string_primary_keys 499 | ), 500 | } 501 | for column in columns 502 | ] 503 | 504 | # Anything not a float or an existing PK could be the next PK, but 505 | # for smaller tables we cut those down to just unique columns 506 | potential_pks = [ 507 | c["name"] for c in columns if c["type"] is not float and not c["is_pk"] 508 | ] 509 | potential_fks = [] 510 | # Only scan for potential foreign keys if there are less than 10,000 511 | # rows - since execute_fn() does not yet support time limits 512 | limited_count = ( 513 | await database.execute( 514 | 'select count(*) from (select 1 from "{}" limit {})'.format( 515 | table, FOREIGN_KEY_DETECTION_LIMIT 516 | ) 517 | ) 518 | ).single_value() 519 | if limited_count and limited_count < FOREIGN_KEY_DETECTION_LIMIT: 520 | potential_fks = await database.execute_fn( 521 | lambda conn: potential_foreign_keys( 522 | conn, 523 | table, 524 | [c["name"] for c in columns if not c["is_pk"]], 525 | other_primary_keys, 526 | ) 527 | ) 528 | for info in all_columns_to_manage_foreign_keys: 529 | info["suggestions"] = potential_fks.get(info["name"], []) 530 | # Now do potential primary keys against non-float columns 531 | non_float_columns = [ 532 | c["name"] for c in columns if c["type"] is not float and not c["is_pk"] 533 | ] 534 | potential_pks = await database.execute_fn( 535 | lambda conn: potential_primary_keys(conn, table, non_float_columns) 536 | ) 537 | 538 | # Add 'options' to those 539 | for info in all_columns_to_manage_foreign_keys: 540 | options = [] 541 | seen = set() 542 | info["html_options"] = options 543 | # Reshuffle so suggestions are at the top 544 | if info["foreign_key"]: 545 | options.append( 546 | { 547 | "name": "{}.{} (current)".format( 548 | info["foreign_key"].other_table, 549 | info["foreign_key"].other_column, 550 | ), 551 | "value": "{}.{}".format( 552 | tilde_encode(info["foreign_key"].other_table), 553 | 
tilde_encode(info["foreign_key"].other_column), 554 | ), 555 | "selected": True, 556 | } 557 | ) 558 | seen.add( 559 | (info["foreign_key"].other_table, info["foreign_key"].other_column) 560 | ) 561 | # Now add suggestions 562 | for suggested_table, suggested_column in info["suggestions"]: 563 | if not ( 564 | info["foreign_key"] 565 | and info["foreign_key"].other_column == suggested_column 566 | ): 567 | options.append( 568 | { 569 | "name": "{}.{} (suggested)".format( 570 | suggested_table, suggested_column 571 | ), 572 | "value": "{}.{}".format( 573 | tilde_encode(suggested_table), 574 | tilde_encode(suggested_column), 575 | ), 576 | "selected": False, 577 | } 578 | ) 579 | seen.add((suggested_table, suggested_column)) 580 | info["suggested"] = "{}.{}".format(suggested_table, suggested_column) 581 | # And the rest 582 | for rest_table, rest_column in info["options"]: 583 | if (rest_table, rest_column) not in seen: 584 | options.append( 585 | { 586 | "name": "{}.{}".format(rest_table, rest_column), 587 | "value": "{}.{}".format( 588 | tilde_encode(rest_table), tilde_encode(rest_column) 589 | ), 590 | "selected": False, 591 | } 592 | ) 593 | 594 | # Don't let users drop sqlite_autoindex_* indexes 595 | existing_indexes = [ 596 | index for index in indexes if not index.name.startswith("sqlite_autoindex_") 597 | ] 598 | # Only allow index creation on non-primary-key columns 599 | non_primary_key_columns = [c for c in columns if not c["is_pk"]] 600 | 601 | return Response.html( 602 | await datasette.render_template( 603 | "edit_schema_table.html", 604 | { 605 | "database": database, 606 | "table": table, 607 | "columns": columns_display, 608 | "schema": schema, 609 | "types": [ 610 | {"name": TYPE_NAMES[value], "value": value} 611 | for value in TYPES.values() 612 | ], 613 | "foreign_keys": foreign_keys, 614 | "all_columns_to_manage_foreign_keys": all_columns_to_manage_foreign_keys, 615 | "potential_pks": potential_pks, 616 | "is_rowid_table": bool(pks == ["rowid"]), 617 | "current_pk": pks[0] if len(pks) == 1 else None, 618 | "existing_indexes": existing_indexes, 619 | "non_primary_key_columns": non_primary_key_columns, 620 | "can_drop_table": await can_drop_table( 621 | datasette, request.actor, database_name, table 622 | ), 623 | "can_rename_table": await can_rename_table( 624 | datasette, request.actor, database_name, table 625 | ), 626 | "tilde_encode": tilde_encode, 627 | }, 628 | request=request, 629 | ) 630 | ) 631 | 632 | 633 | async def drop_table(request, datasette, database, table): 634 | if not await can_drop_table(datasette, request.actor, database.name, table): 635 | raise Forbidden("Permission denied for drop-table") 636 | 637 | def do_drop_table(conn): 638 | db = sqlite_utils.Database(conn) 639 | db[table].disable_fts() 640 | db[table].drop() 641 | db.vacuum() 642 | 643 | if hasattr(database, "execute_isolated_fn"): 644 | await database.execute_isolated_fn(do_drop_table) 645 | # For the tests 646 | datasette._datasette_edit_schema_used_execute_isolated_fn = True 647 | else: 648 | await database.execute_write_fn(do_drop_table) 649 | 650 | datasette.add_message(request, "Table has been deleted") 651 | await datasette.track_event( 652 | DropTableEvent( 653 | actor=request.actor, 654 | database=database.name, 655 | table=table, 656 | ) 657 | ) 658 | return Response.redirect("/-/edit-schema/" + database.name) 659 | 660 | 661 | async def add_column(request, datasette, database, table, formdata): 662 | name = formdata["name"] 663 | type = formdata["type"] 664 | 665 | redirect = 
Response.redirect( 666 | "/-/edit-schema/{}/{}".format(quote_plus(database.name), quote_plus(table)) 667 | ) 668 | 669 | if not name: 670 | datasette.add_message(request, "Column name is required", datasette.ERROR) 671 | return redirect 672 | 673 | if type.upper() not in REV_TYPES: 674 | datasette.add_message(request, "Invalid type: {}".format(type), datasette.ERROR) 675 | return redirect 676 | 677 | def do_add_column(conn): 678 | db = sqlite_utils.Database(conn) 679 | db[table].add_column(name, REV_TYPES[type.upper()]) 680 | 681 | error = None 682 | try: 683 | await datasette.databases[database.name].execute_write_fn( 684 | do_add_column, block=True 685 | ) 686 | except sqlite3.OperationalError as e: 687 | if "duplicate column name" in str(e): 688 | error = "A column called '{}' already exists".format(name) 689 | else: 690 | error = str(e) 691 | 692 | if error: 693 | datasette.add_message(request, error, datasette.ERROR) 694 | else: 695 | datasette.add_message(request, "Column has been added") 696 | return redirect 697 | 698 | 699 | async def rename_table(request, datasette, database, table, formdata): 700 | new_name = formdata.get("name", "").strip() 701 | redirect = Response.redirect( 702 | "/-/edit-schema/{}/{}".format(quote_plus(database.name), quote_plus(table)) 703 | ) 704 | if not new_name: 705 | datasette.add_message(request, "New table name is required", datasette.ERROR) 706 | return redirect 707 | if new_name == table: 708 | datasette.add_message(request, "Table name was the same", datasette.WARNING) 709 | return redirect 710 | 711 | existing_tables = await database.table_names() 712 | if new_name in existing_tables: 713 | datasette.add_message( 714 | request, 715 | "A table called '{}' already exists".format(new_name), 716 | datasette.ERROR, 717 | ) 718 | return redirect 719 | 720 | # User must have drop-table permission on old table and create-table on new table 721 | if not await can_rename_table(datasette, request.actor, database.name, table): 722 | datasette.add_message( 723 | request, 724 | "Permission denied to rename table '{}'".format(table), 725 | datasette.ERROR, 726 | ) 727 | return redirect 728 | 729 | try: 730 | before_schema = await database.execute_fn( 731 | lambda conn: sqlite_utils.Database(conn)[table].schema 732 | ) 733 | await database.execute_write( 734 | """ 735 | ALTER TABLE [{}] RENAME TO [{}]; 736 | """.format( 737 | table, new_name 738 | ), 739 | block=True, 740 | ) 741 | after_schema = await database.execute_fn( 742 | lambda conn: sqlite_utils.Database(conn)[new_name].schema 743 | ) 744 | datasette.add_message( 745 | request, "Table renamed to '{}'".format(new_name), datasette.INFO 746 | ) 747 | await datasette.track_event( 748 | AlterTableEvent( 749 | actor=request.actor, 750 | database=database.name, 751 | table=new_name, 752 | before_schema=before_schema, 753 | after_schema=after_schema, 754 | ) 755 | ) 756 | 757 | except Exception as error: 758 | datasette.add_message( 759 | request, "Error renaming table: {}".format(str(error)), datasette.ERROR 760 | ) 761 | return redirect 762 | return Response.redirect( 763 | "/-/edit-schema/{}/{}".format(quote_plus(database.name), quote_plus(new_name)) 764 | ) 765 | 766 | 767 | async def update_foreign_keys(request, datasette, database, table, formdata): 768 | new_fks = { 769 | key[3:]: value 770 | for key, value in formdata.items() 771 | if key.startswith("fk.") and value.strip() 772 | } 773 | existing_fks = { 774 | fk.column: fk.other_table + "." 
+ fk.other_column 775 | for fk in await database.execute_fn( 776 | lambda conn: sqlite_utils.Database(conn)[table].foreign_keys 777 | ) 778 | } 779 | if new_fks == existing_fks: 780 | datasette.add_message(request, "No changes to foreign keys", datasette.WARNING) 781 | return Response.redirect(request.path) 782 | 783 | # Need that in (column, other_table, other_column) format 784 | fks = [] 785 | for column, other_table_and_column in new_fks.items(): 786 | split = other_table_and_column.split(".") 787 | fks.append( 788 | ( 789 | column, 790 | tilde_decode(split[0]), 791 | tilde_decode(split[1]), 792 | ) 793 | ) 794 | 795 | # Update foreign keys 796 | def run(conn): 797 | db = sqlite_utils.Database(conn) 798 | with conn: 799 | db[table].transform(foreign_keys=fks) 800 | 801 | await database.execute_write_fn(run, block=True) 802 | summary = ", ".join("{} → {}.{}".format(*fk) for fk in fks) 803 | if summary: 804 | message = "Foreign keys updated{}".format( 805 | " to {}".format(summary) if summary else "" 806 | ) 807 | else: 808 | message = "Foreign keys removed" 809 | datasette.add_message( 810 | request, 811 | message, 812 | ) 813 | return Response.redirect(request.path) 814 | 815 | 816 | async def update_primary_key(request, datasette, database, table, formdata): 817 | primary_key = formdata["primary_key"] 818 | if not primary_key: 819 | datasette.add_message(request, "Primary key is required", datasette.ERROR) 820 | return Response.redirect(request.path) 821 | 822 | def run(conn): 823 | db = sqlite_utils.Database(conn) 824 | with conn: 825 | if primary_key not in db[table].columns_dict: 826 | return "Column '{}' does not exist".format(primary_key) 827 | # Make sure it's unique 828 | sql = 'select count(*) - count(distinct("{}")) from "{}"'.format( 829 | primary_key, table 830 | ) 831 | should_be_zero = db.execute(sql).fetchone()[0] 832 | if should_be_zero: 833 | return "Column '{}' is not unique".format(primary_key) 834 | db[table].transform(pk=primary_key) 835 | return None 836 | 837 | error = await database.execute_write_fn(run, block=True) 838 | if error: 839 | datasette.add_message(request, error, datasette.ERROR) 840 | else: 841 | datasette.add_message( 842 | request, 843 | "Primary key for '{}' is now '{}'".format( 844 | table, 845 | formdata["primary_key"], 846 | ), 847 | ) 848 | return Response.redirect(request.path) 849 | 850 | 851 | async def add_index(request, datasette, database, table, column, unique): 852 | if not column: 853 | datasette.add_message(request, "Column name is required", datasette.ERROR) 854 | return Response.redirect(request.path) 855 | 856 | def run(conn): 857 | db = sqlite_utils.Database(conn) 858 | with conn: 859 | db[table].create_index([column], find_unique_name=True, unique=unique) 860 | 861 | try: 862 | await database.execute_write_fn(run, block=True) 863 | message = "Index added on " 864 | if unique: 865 | message = "Unique index added on " 866 | message += column 867 | datasette.add_message(request, message) 868 | except Exception as e: 869 | datasette.add_message(request, str(e), datasette.ERROR) 870 | return Response.redirect(request.path) 871 | 872 | 873 | async def drop_index(request, datasette, database, table, formdata): 874 | to_drops = [ 875 | key[len("drop_index_") :] 876 | for key in formdata.keys() 877 | if key.startswith("drop_index_") 878 | ] 879 | if to_drops: 880 | to_drop = to_drops[0] 881 | 882 | def run(conn): 883 | with conn: 884 | conn.execute("DROP INDEX [{}]".format(to_drop)) 885 | 886 | try: 887 | await 
database.execute_write_fn(run, block=True) 888 | datasette.add_message(request, "Index dropped: {}".format(to_drop)) 889 | except Exception as e: 890 | datasette.add_message(request, str(e), datasette.ERROR) 891 | else: 892 | datasette.add_message(request, "No index name provided", datasette.ERROR) 893 | return Response.redirect(request.path) 894 | -------------------------------------------------------------------------------- /datasette_edit_schema/templates/edit_schema_create_table.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Create a table in {{ database.name }}{% endblock %} 4 | 5 | {% block extra_head %} 6 | 69 | 70 | 75 | {% endblock %} 76 | 77 | {% block crumbs %} 78 | {{ crumbs.nav(request=request, database=database.name) }} 79 | {% endblock %} 80 | 81 | {% block content %} 82 |

Create a table in {{ database.name }}

83 | 84 | 85 |

86 | 87 | 88 |

89 |

Columns

90 |

If the primary key is an integer it will automatically count up from 1

91 |
    92 | 93 |
  • 94 | 95 | 99 |  Primary key 100 |
  • 101 |
102 |
    103 | {% for column in columns %} 104 |
  • 105 | 106 | 111 | 114 | 115 |
  • 116 | {% endfor %} 117 |
118 |

119 | 120 | 121 |

122 | 123 | 124 | 136 | 137 | {% endblock %} 138 | -------------------------------------------------------------------------------- /datasette_edit_schema/templates/edit_schema_database.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Edit tables in {{ database.name }}.db{% endblock %} 4 | 5 | {% block crumbs %} 6 | {{ crumbs.nav(request=request, database=database.name) }} 7 | {% endblock %} 8 | 9 | {% block content %} 10 |

Edit tables in {{ database.name }}.db

11 | 12 | {% for table in tables %} 13 |

{{ table.name }}

14 |

{% for column in table.columns %}{{ column.name }}{% if not loop.last %}, {% endif %}{% endfor %}

15 | {% endfor %} 16 | 17 | {% endblock %} 18 | -------------------------------------------------------------------------------- /datasette_edit_schema/templates/edit_schema_index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Edit schema{% endblock %} 4 | 5 | {% block extra_head %} 6 | 7 | {% endblock %} 8 | 9 | {% block content %} 10 |

Edit schema

11 | 12 | {% if databases %} 13 |

Select a database to edit:

14 | 15 |
    16 | {% for database in databases %} 17 |
  • {{ database }}
  • 18 | {% endfor %} 19 |
20 | {% else %} 21 |

You do not have any writable database files attached.

22 | {% endif %} 23 | 24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /datasette_edit_schema/templates/edit_schema_table.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Edit table {{ table }} in {{ database.name }}{% endblock %} 4 | 5 | {% block extra_head %} 6 | 77 | 78 | 83 | {% endblock %} 84 | 85 | {% block crumbs %} 86 | {{ crumbs.nav(request=request, database=database.name, table=table) }} 87 | {% endblock %} 88 | 89 | {% block content %} 90 |

Edit table {{ database.name }}/{{ table }}

91 | 92 | {% if can_rename_table %} 93 |

Rename table

94 | 95 | 96 | 97 |

98 | 99 | 100 | 101 | {% endif %} 102 | 103 |
104 |

Change existing columns

105 |
    106 | {% for column in columns %} 107 |
  • 108 | 109 | 114 | 117 | 120 | 121 | {% if column.examples %} 122 |
    {{ ", ".join(column.examples) }}
    123 | {% else %} 124 |
    - no example values -
    125 | {% endif %} 126 |
  • 127 | {% endfor %} 128 |
129 |

130 | 131 | 132 | 133 |

134 | 135 | 136 |

Add a column

137 | 138 | 139 | 140 | 141 |

142 |

147 | 148 | 149 | 150 |

Update foreign key relationships

151 | 152 |

Configure foreign keys on columns so Datasette can link related tables together.

153 | 154 | 159 | 160 | 161 | 162 | 163 |
164 | {% for column in all_columns_to_manage_foreign_keys %} 165 | 166 | 167 | 173 | 174 | {% endfor %} 175 |
171 | {% if column.suggested %}

Suggested: {{ column.suggested }}

{% endif %} 172 |
176 | 177 | 178 | 179 | {% if potential_pks %} 180 |

{% if is_rowid_table %}Set a primary key{% else %}Change the primary key{% endif %}

181 | 182 |

The primary key column uniquely identifies each row in the table.

183 | 184 |
185 | 186 | 187 | 188 | 196 |

197 |
198 | {% endif %} 199 | 200 | {% if non_primary_key_columns or existing_indexes %} 201 |

Table indexes

202 | 203 |

Indexes can speed up filter and sort operations against indexed columns.

204 | 205 |
206 | 207 | {% if non_primary_key_columns %} 208 |

215 | 216 | 217 |

218 | {% endif %} 219 | {% if existing_indexes %} 220 |

Existing indexes

221 | {% for index in existing_indexes %} 222 |

223 | {{ index.name }} 224 | {% if index.unique %} (unique){% endif %} 225 | on column{{ 's' if index.columns[1:] else '' }} 226 | {{ index.columns|join(', ') }} 227 | 228 |

229 | {% endfor %} 230 | {% endif %} 231 |
232 | {% endif %} 233 | 234 | {% if can_drop_table %} 235 |

Drop table

236 | 237 |
238 | 239 | 240 | 241 |
242 | {% endif %} 243 | 244 |

Current table schema

245 |
{{ schema }}
246 | 247 | 266 | 267 | {% endblock %} 268 | -------------------------------------------------------------------------------- /datasette_edit_schema/utils.py: -------------------------------------------------------------------------------- 1 | import sqlite_utils 2 | import json 3 | 4 | 5 | def get_primary_keys(conn): 6 | db = sqlite_utils.Database(conn) 7 | primary_keys = [] 8 | for table in db.tables: 9 | if "_fts_" in table.name: 10 | continue 11 | pks = table.pks 12 | if pks == ["rowid"]: 13 | continue 14 | if len(pks) != 1: 15 | continue 16 | pk = pks[0] 17 | # Is that a str or int? 18 | pk_type = table.columns_dict[pk] 19 | if pk_type in (str, int): 20 | primary_keys.append((table.name, pk, pk_type)) 21 | return primary_keys 22 | 23 | 24 | def potential_foreign_keys(conn, table_name, columns, other_table_pks): 25 | potentials = {} 26 | cursor = conn.cursor() 27 | for column in columns: 28 | potentials[column] = [] 29 | for other_table, other_column, _ in other_table_pks: 30 | # Search for a value in this column that does not exist in the other table, 31 | # terminate early as soon as we find one since that shows this is not a 32 | # good foreign key candidate. 33 | query = """ 34 | select "{table}"."{column}" 35 | from "{table}" 36 | where not exists ( 37 | select 1 38 | from "{other_table}" 39 | where "{table}"."{column}" = "{other_table}"."{other_column}" 40 | ) 41 | limit 1; 42 | """.format( 43 | table=table_name, 44 | column=column, 45 | other_table=other_table, 46 | other_column=other_column, 47 | ) 48 | cursor.execute(query) 49 | if cursor.fetchone() is None: 50 | potentials[column].append((other_table, other_column)) 51 | return potentials 52 | 53 | 54 | def potential_primary_keys(conn, table_name, columns, max_string_len=128): 55 | # First we run a query to check the max length of each column + if it has any nulls 56 | if not columns: 57 | return [] 58 | selects = [] 59 | for column in columns: 60 | selects.append('max(length("{}")) as "maxlen.{}"'.format(column, column)) 61 | selects.append( 62 | 'sum(case when "{}" is null then 1 else 0 end) as "nulls.{}"'.format( 63 | column, column 64 | ) 65 | ) 66 | sql = 'select {} from "{}"'.format(", ".join(selects), table_name) 67 | cursor = conn.cursor() 68 | cursor.execute(sql) 69 | row = cursor.fetchone() 70 | potential_columns = [] 71 | for i, column in enumerate(columns): 72 | maxlen = row[i * 2] or 0 73 | nulls = row[i * 2 + 1] or 0 74 | if maxlen < max_string_len and nulls == 0: 75 | potential_columns.append(column) 76 | if not potential_columns: 77 | return [] 78 | # Count distinct values in each of our candidate columns 79 | selects = ["count(*) as _count"] 80 | for column in potential_columns: 81 | selects.append('count(distinct "{}") as "distinct.{}"'.format(column, column)) 82 | sql = 'select {} from "{}"'.format(", ".join(selects), table_name) 83 | cursor.execute(sql) 84 | row = cursor.fetchone() 85 | count = row[0] 86 | potential_pks = [] 87 | for i, column in enumerate(potential_columns): 88 | distinct = row[i + 1] 89 | if distinct == count: 90 | potential_pks.append(column) 91 | return potential_pks 92 | 93 | 94 | def examples_for_columns(conn, table_name): 95 | columns = sqlite_utils.Database(conn)[table_name].columns_dict.keys() 96 | ctes = [f'rows as (select * from "{table_name}" limit 1000)'] 97 | unions = [] 98 | params = [] 99 | for i, column in enumerate(columns): 100 | ctes.append( 101 | f'col{i} as (select distinct "{column}" from rows ' 102 | f'where ("{column}" is not null and "{column}" != "") limit 5)' 
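# The two adjacent f-strings above are joined by Python's implicit string
# concatenation: together they define one CTE per column, holding up to
# five distinct, non-null, non-blank sample values drawn from the
# 1,000-row "rows" CTE built at the top of this function.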
103 | ) 104 | unions.append(f'select ? as label, "{column}" as value from col{i}') 105 | params.append(column) 106 | ctes.append("strings as ({})".format("\nunion all\n".join(unions))) 107 | ctes.append( 108 | """ 109 | truncated_strings as ( 110 | select 111 | label, 112 | case 113 | when length(value) > 30 then substr(value, 1, 30) || '...' 114 | else value 115 | end as value 116 | from strings 117 | where typeof(value) != 'blob' 118 | ) 119 | """ 120 | ) 121 | sql = ( 122 | "with {ctes} ".format(ctes=",\n".join(ctes)) 123 | + "select label, json_group_array(value) as examples " 124 | "from truncated_strings group by label" 125 | ) 126 | output = {} 127 | for column, examples in conn.execute(sql, params).fetchall(): 128 | output[column] = list(map(str, json.loads(examples))) 129 | return output 130 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "datasette-edit-schema" 3 | version = "0.8a3" 4 | description = "Datasette plugin for modifying table schemas" 5 | readme = "README.md" 6 | authors = [{name = "Simon Willison"}] 7 | license = {text = "Apache-2.0"} 8 | classifiers=[ 9 | "Framework :: Datasette", 10 | "License :: OSI Approved :: Apache Software License" 11 | ] 12 | requires-python = ">=3.8" 13 | dependencies = [ 14 | "datasette>=1.0a13", 15 | "sqlite-utils>=3.35", 16 | ] 17 | 18 | [project.urls] 19 | Homepage = "https://datasette.io/plugins/datasette-edit-schema" 20 | Changelog = "https://github.com/simonw/datasette-edit-schema/releases" 21 | Issues = "https://github.com/simonw/datasette-edit-schema/issues" 22 | CI = "https://github.com/simonw/datasette-edit-schema/actions" 23 | 24 | [project.entry-points.datasette] 25 | edit_schema = "datasette_edit_schema" 26 | 27 | [project.optional-dependencies] 28 | test = ["pytest", "pytest-asyncio", "beautifulsoup4", "html5lib"] 29 | 30 | [tool.pytest.ini_options] 31 | asyncio_mode = "strict" 32 | 33 | [tool.setuptools] 34 | packages = ["datasette_edit_schema"] 35 | 36 | [tool.setuptools.package-data] 37 | datasette_edit_schema = ["templates/*", "static/*"] 38 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simonw/datasette-edit-schema/e393cda78f126fb80477690626754dd94a44d3a7/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from datasette import hookimpl 3 | from datasette.app import Datasette 4 | from datasette.plugins import pm 5 | import pytest 6 | import sqlite_utils 7 | 8 | 9 | @pytest.fixture 10 | def db_and_path(tmpdir): 11 | path = str(tmpdir / "data.db") 12 | db = sqlite_utils.Database(path) 13 | db["creatures"].insert_all( 14 | [ 15 | {"name": "Cleo", "description": "A medium sized dog"}, 16 | {"name": "Siroco", "description": "A troublesome Kakapo"}, 17 | ] 18 | ) 19 | db["other_table"].insert({"foo": "bar"}) 20 | db["empty_table"].create({"id": int, "name": str}, pk="id") 21 | # Tables for testing foreign key editing 22 | db["museums"].insert_all( 23 | [ 24 | { 25 | "id": "moma", 26 | "name": "Museum of Modern Art", 27 | "city_id": "nyc", 28 | }, 29 | { 30 | "id": "tate", 31 | "name": "Tate Modern", 32 | "city_id": 
"london", 33 | }, 34 | { 35 | "id": "exploratorium", 36 | "name": "Exploratorium", 37 | "city_id": "sf", 38 | }, 39 | { 40 | "id": "cablecars", 41 | "name": "Cable Car Museum", 42 | "city_id": "sf", 43 | }, 44 | ], 45 | pk="id", 46 | ) 47 | db["cities"].insert_all( 48 | [ 49 | { 50 | "id": "nyc", 51 | "name": "New York City", 52 | }, 53 | { 54 | "id": "london", 55 | "name": "London", 56 | }, 57 | { 58 | "id": "sf", 59 | "name": "San Francisco", 60 | }, 61 | ], 62 | pk="id", 63 | ) 64 | db["distractions"].insert_all( 65 | [ 66 | { 67 | "id": "nyc", 68 | "name": "Nice Yummy Cake", 69 | } 70 | ], 71 | pk="id", 72 | ) 73 | db["has_foreign_keys"].insert( 74 | { 75 | "id": 1, 76 | "distraction_id": "nyc", 77 | }, 78 | pk="id", 79 | foreign_keys=(("distraction_id", "distractions"),), 80 | ) 81 | db["has_indexes"].insert( 82 | { 83 | "id": 1, 84 | "name": "Cleo", 85 | "description": "A medium sized dog", 86 | }, 87 | pk="id", 88 | ) 89 | db["has_indexes"].create_index(["name"], index_name="name_index") 90 | db["has_indexes"].create_index( 91 | ["name"], index_name="name_unique_index", unique=True 92 | ) 93 | db["animal.name/with/slashes"].insert( 94 | { 95 | "id": 1, 96 | "name": "Sirocco", 97 | "species": 582, 98 | }, 99 | pk="id", 100 | ) 101 | db["table.name/with/slashes.categories"].insert( 102 | { 103 | "id": 582, 104 | "name": "Kākāpō", 105 | }, 106 | pk="id", 107 | ) 108 | 109 | return db, path 110 | 111 | 112 | @pytest.fixture 113 | def db_path(db_and_path): 114 | return db_and_path[1] 115 | 116 | 117 | @pytest.fixture 118 | def db(db_and_path): 119 | return db_and_path[0] 120 | 121 | 122 | @pytest.fixture 123 | def ds(db_path): 124 | return Datasette([db_path]) 125 | 126 | 127 | @dataclass 128 | class Rule: 129 | actor_id: str 130 | action: str 131 | database: str = None 132 | resource: str = None 133 | 134 | 135 | @pytest.fixture 136 | def rule(): 137 | return Rule 138 | 139 | 140 | @pytest.fixture 141 | def permission_plugin(): 142 | class PermissionPlugin: 143 | __name__ = "PermissionPlugin" 144 | 145 | # Use hookimpl and method names to register hooks 146 | @hookimpl 147 | def permission_allowed(self, datasette, actor, action, resource): 148 | if not actor: 149 | return None 150 | database_name = None 151 | resource_name = None 152 | if isinstance(resource, str): 153 | database_name = resource 154 | elif resource: 155 | database_name, resource_name = resource 156 | to_match = Rule( 157 | actor_id=actor["id"], 158 | action=action, 159 | database=database_name, 160 | resource=resource_name, 161 | ) 162 | if to_match in getattr(datasette, "_rules_allow", []): 163 | return True 164 | elif to_match in getattr(datasette, "_rules_deny", []): 165 | return False 166 | return None 167 | 168 | pm.register(PermissionPlugin(), name="undo_permission_plugin") 169 | yield 170 | pm.unregister(name="undo_permission_plugin") 171 | 172 | 173 | class TrackEventPlugin: 174 | __name__ = "TrackEventPlugin" 175 | 176 | @hookimpl 177 | def track_event(self, datasette, event): 178 | datasette._tracked_events = getattr(datasette, "_tracked_events", []) 179 | datasette._tracked_events.append(event) 180 | 181 | 182 | @pytest.fixture(scope="session", autouse=True) 183 | def install_event_tracking_plugin(): 184 | from datasette.plugins import pm 185 | 186 | pm.register(TrackEventPlugin(), name="TrackEventPlugin") 187 | -------------------------------------------------------------------------------- /tests/test_edit_schema.py: -------------------------------------------------------------------------------- 1 | from 
datasette.app import Datasette 2 | from datasette.utils import tilde_encode 3 | from datasette_edit_schema.utils import ( 4 | potential_foreign_keys, 5 | get_primary_keys, 6 | examples_for_columns, 7 | potential_primary_keys, 8 | ) 9 | import sqlite_utils 10 | import pytest 11 | import re 12 | from bs4 import BeautifulSoup 13 | from .conftest import Rule 14 | 15 | whitespace = re.compile(r"\s+") 16 | 17 | 18 | def get_last_event(datasette): 19 | # Returns None if events are not tracked 20 | events = getattr(datasette, "_tracked_events", []) 21 | if events: 22 | return events[-1] 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_csrf_required(db_path): 27 | ds = Datasette([db_path]) 28 | response = await ds.client.post( 29 | "/edit-schema/data/creatures", 30 | data={"drop_table": "1"}, 31 | cookies={"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")}, 32 | ) 33 | assert response.status_code == 403 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "actor_id,should_allow", 38 | ( 39 | (None, False), 40 | ("user_with_edit_schema", True), 41 | ("user_with_create_table", False), 42 | ("user_with_no_perms", False), 43 | ), 44 | ) 45 | @pytest.mark.parametrize("table", ("creatures", "animal.name/with/slashes")) 46 | @pytest.mark.asyncio 47 | async def test_table_actions(permission_plugin, ds, actor_id, should_allow, table): 48 | ds._rules_allow = [ 49 | Rule( 50 | actor_id="user_with_edit_schema", 51 | action="edit-schema", 52 | database="data", 53 | resource=None, 54 | ), 55 | Rule( 56 | actor_id="user_with_create_table", 57 | action="create-table", 58 | database="data", 59 | resource=None, 60 | ), 61 | ] 62 | cookies = None 63 | if actor_id: 64 | cookies = {"ds_actor": ds.sign({"a": {"id": actor_id}}, "actor")} 65 | response = await ds.client.get( 66 | ds.urls.table(database="data", table=table), cookies=cookies 67 | ) 68 | assert response.status_code == 200 69 | fragment = '<a href="/-/edit-schema/data/{}">Edit table schema</a>'.format( 70 | tilde_encode(table) 71 | ) 72 | if should_allow: 73 | # Should have table action 74 | assert fragment in response.text 75 | else: 76 | assert fragment not in response.text 77 | 78 | 79 | @pytest.mark.asyncio 80 | async def test_post_without_operation_raises_error(db_path): 81 | ds = Datasette([db_path]) 82 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 83 | # Get a csrftoken 84 | csrftoken = ( 85 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 86 | ).cookies["ds_csrftoken"] 87 | cookies["ds_csrftoken"] = csrftoken 88 | response = await ds.client.post( 89 | "/-/edit-schema/data/creatures", 90 | data={"csrftoken": csrftoken}, 91 | cookies=cookies, 92 | ) 93 | assert response.status_code == 400 94 | 95 | 96 | @pytest.mark.asyncio 97 | @pytest.mark.parametrize( 98 | "actor_id,should_allow", 99 | ( 100 | (None, False), 101 | ("user_with_edit_schema", True), 102 | ("user_with_just_create_table", False), 103 | ("user_with_alter_and_drop_table", True), 104 | ), 105 | ) 106 | async def test_drop_table(permission_plugin, db_path, actor_id, should_allow): 107 | ds = Datasette([db_path], pdb=True) 108 | ds._rules_allow = [ 109 | Rule( 110 | actor_id="user_with_edit_schema", 111 | action="edit-schema", 112 | database="data", 113 | resource=None, 114 | ), 115 | Rule( 116 | actor_id="user_with_alter_and_drop_table", 117 | action="drop-table", 118 | database="data", 119 | resource="creatures", 120 | ), 121 | Rule( 122 | actor_id="user_with_alter_and_drop_table", 123 | action="alter-table", 124 | database="data", 125 | resource="creatures", 126 | ), 127 | Rule( 128
| actor_id="user_with_just_create_table", 129 | action="create-table", 130 | database="data", 131 | resource=None, 132 | ), 133 | ] 134 | db = sqlite_utils.Database(db_path) 135 | assert "creatures" in db.table_names() 136 | cookies = {} 137 | if actor_id: 138 | cookies = {"ds_actor": ds.sign({"a": {"id": actor_id}}, "actor")} 139 | # Get a csrftoken 140 | form_response = await ds.client.get( 141 | "/-/edit-schema/data/creatures", cookies=cookies 142 | ) 143 | if actor_id in (None, "user_with_just_create_table"): 144 | assert form_response.status_code == 403 145 | return 146 | assert form_response.status_code == 200 147 | csrftoken = form_response.cookies["ds_csrftoken"] 148 | if should_allow: 149 | assert 'name="drop_table"' in form_response.text 150 | else: 151 | assert 'name="drop_table"' not in form_response.text 152 | # Try submitting form anyway 153 | response = await ds.client.post( 154 | "/-/edit-schema/data/creatures", 155 | data={"drop_table": "1", "csrftoken": csrftoken}, 156 | cookies=dict(cookies, ds_csrftoken=csrftoken), 157 | ) 158 | if should_allow: 159 | assert response.status_code == 302 160 | assert "creatures" not in db.table_names() 161 | event = get_last_event(ds) 162 | if event is not None: 163 | assert event.name == "drop-table" 164 | # This should have used isolated_fn as well: 165 | assert getattr(ds, "_datasette_edit_schema_used_execute_isolated_fn", None) 166 | else: 167 | assert response.status_code == 403 168 | assert "creatures" in db.table_names() 169 | 170 | 171 | @pytest.mark.asyncio 172 | @pytest.mark.parametrize( 173 | "col_type,expected_type", 174 | [("text", str), ("integer", int), ("real", float), ("blob", bytes)], 175 | ) 176 | async def test_add_column(db_path, col_type, expected_type): 177 | ds = Datasette([db_path]) 178 | db = sqlite_utils.Database(db_path) 179 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 180 | table = db["creatures"] 181 | assert {"name": str, "description": str} == table.columns_dict 182 | # Get a csrftoken 183 | csrftoken = ( 184 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 185 | ).cookies["ds_csrftoken"] 186 | response = await ds.client.post( 187 | "/-/edit-schema/data/creatures", 188 | data={ 189 | "add_column": "1", 190 | "csrftoken": csrftoken, 191 | "name": "new_col", 192 | "type": col_type, 193 | }, 194 | cookies=dict(cookies, ds_csrftoken=csrftoken), 195 | ) 196 | assert response.status_code == 302 197 | if "ds_messages" in response.cookies: 198 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 199 | # None of these should be errors 200 | assert all(m[1] == Datasette.INFO for m in messages), "Got an error: {}".format( 201 | messages 202 | ) 203 | assert { 204 | "name": str, 205 | "description": str, 206 | "new_col": expected_type, 207 | } == table.columns_dict 208 | 209 | 210 | @pytest.mark.asyncio 211 | @pytest.mark.parametrize( 212 | "name,type,expected_error", 213 | [ 214 | ("name", "text", "A column called 'name' already exists"), 215 | ("", "text", "Column name is required"), 216 | ("]]]", "integer", 'unrecognized token: "]"'), 217 | ("name", "blop", "Invalid type: blop"), 218 | ], 219 | ) 220 | async def test_add_column_errors(db_path, name, type, expected_error): 221 | ds = Datasette([db_path]) 222 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 223 | csrftoken = ( 224 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 225 | ).cookies["ds_csrftoken"] 226 | response = await ds.client.post( 227 | 
"/-/edit-schema/data/creatures", 228 | data={ 229 | "add_column": "1", 230 | "name": name, 231 | "type": type, 232 | "csrftoken": csrftoken, 233 | }, 234 | cookies=dict(cookies, ds_csrftoken=csrftoken), 235 | ) 236 | assert response.status_code == 302 237 | assert response.headers["location"] == "/-/edit-schema/data/creatures" 238 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 239 | assert len(messages) == 1 240 | assert messages[0][0] == expected_error 241 | 242 | 243 | @pytest.mark.asyncio 244 | @pytest.mark.parametrize( 245 | "post_data,action,expected_columns_dict,expected_order,expected_message", 246 | [ 247 | # Change column type 248 | ( 249 | { 250 | "type.name": "REAL", 251 | }, 252 | "update_columns", 253 | {"name": float, "description": str}, 254 | ["name", "description"], 255 | "Changes to table have been saved", 256 | ), 257 | ( 258 | { 259 | "type.name": "INTEGER", 260 | }, 261 | "update_columns", 262 | {"name": int, "description": str}, 263 | ["name", "description"], 264 | "Changes to table have been saved", 265 | ), 266 | # Changing order 267 | ( 268 | { 269 | "sort.description": "0", 270 | "sort.name": "2", 271 | }, 272 | "update_columns", 273 | {"name": str, "description": str}, 274 | ["description", "name"], 275 | "Changes to table have been saved", 276 | ), 277 | # Change names 278 | ( 279 | { 280 | "name.name": "name2", 281 | "name.description": "description2", 282 | }, 283 | "update_columns", 284 | {"name2": str, "description2": str}, 285 | ["name2", "description2"], 286 | "Changes to table have been saved", 287 | ), 288 | # Add new columns 289 | ( 290 | { 291 | "add_column": "1", 292 | "name": "new_text", 293 | "type": "text", 294 | }, 295 | None, 296 | {"name": str, "description": str, "new_text": str}, 297 | ["name", "description", "new_text"], 298 | "Column has been added", 299 | ), 300 | ( 301 | { 302 | "add_column": "1", 303 | "name": "new_integer", 304 | "type": "integer", 305 | }, 306 | None, 307 | {"name": str, "description": str, "new_integer": int}, 308 | ["name", "description", "new_integer"], 309 | "Column has been added", 310 | ), 311 | ( 312 | { 313 | "add_column": "1", 314 | "name": "new_float", 315 | "type": "real", 316 | }, 317 | None, 318 | {"name": str, "description": str, "new_float": float}, 319 | ["name", "description", "new_float"], 320 | "Column has been added", 321 | ), 322 | ( 323 | { 324 | "add_column": "1", 325 | "name": "new_blob", 326 | "type": "blob", 327 | }, 328 | None, 329 | {"name": str, "description": str, "new_blob": bytes}, 330 | ["name", "description", "new_blob"], 331 | "Column has been added", 332 | ), 333 | # Drop column 334 | ( 335 | { 336 | "delete.description": "1", 337 | }, 338 | "update_columns", 339 | {"name": str}, 340 | ["name"], 341 | "Changes to table have been saved", 342 | ), 343 | ], 344 | ) 345 | async def test_transform_table( 346 | db_path, action, post_data, expected_columns_dict, expected_order, expected_message 347 | ): 348 | ds = Datasette([db_path]) 349 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 350 | db = sqlite_utils.Database(db_path) 351 | table = db["creatures"] 352 | before_schema = table.schema 353 | assert table.columns_dict == {"name": str, "description": str} 354 | csrftoken = ( 355 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 356 | ).cookies["ds_csrftoken"] 357 | post_data["csrftoken"] = csrftoken 358 | if action: 359 | post_data["action"] = action 360 | response = await ds.client.post( 361 | "/-/edit-schema/data/creatures", 
362 | data=post_data, 363 | cookies=dict(cookies, ds_csrftoken=csrftoken), 364 | ) 365 | assert response.status_code == 302 366 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 367 | assert table.columns_dict == expected_columns_dict 368 | assert [c.name for c in table.columns] == expected_order 369 | assert len(messages) == 1 370 | assert messages[0][0] == expected_message 371 | # Should have tracked an event 372 | event = get_last_event(ds) 373 | assert event.name == "alter-table" 374 | assert event.before_schema == before_schema 375 | assert event.after_schema == table.schema 376 | 377 | 378 | @pytest.mark.asyncio 379 | async def test_drop_column_from_table_that_is_part_of_a_view(db_path): 380 | # https://github.com/simonw/datasette-edit-schema/issues/35 381 | ds = Datasette([db_path], pdb=True) 382 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 383 | db = sqlite_utils.Database(db_path) 384 | db.create_view("creatures_view", "select * from creatures") 385 | table = db["creatures"] 386 | assert table.columns_dict == {"name": str, "description": str} 387 | csrftoken = ( 388 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 389 | ).cookies["ds_csrftoken"] 390 | post_data = { 391 | "delete.description": "1", 392 | "csrftoken": csrftoken, 393 | "action": "update_columns", 394 | } 395 | response = await ds.client.post( 396 | "/-/edit-schema/data/creatures", 397 | data=post_data, 398 | cookies=dict(cookies, ds_csrftoken=csrftoken), 399 | ) 400 | assert response.status_code == 302 401 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 402 | assert table.columns_dict == {"name": str} 403 | assert [c.name for c in table.columns] == ["name"] 404 | assert len(messages) == 1 405 | assert messages[0][0] == "Changes to table have been saved" 406 | 407 | 408 | @pytest.mark.asyncio 409 | async def test_static_assets(db_path): 410 | ds = Datasette([db_path]) 411 | for path in ( 412 | "/-/static-plugins/datasette-edit-schema/draggable.1.0.0-beta.11.bundle.min.js", 413 | ): 414 | response = await ds.client.post(path) 415 | assert response.status_code == 200 416 | 417 | 418 | @pytest.mark.asyncio 419 | @pytest.mark.parametrize( 420 | "path", ["/-/edit-schema", "/-/edit-schema/data", "/-/edit-schema/data/creatures"] 421 | ) 422 | async def test_permission_edit_schema(db_path, path): 423 | # root user has edit-schema which allows access to all 424 | ds = Datasette([db_path]) 425 | someuser_cookies = {"ds_actor": ds.sign({"a": {"id": "someuser"}}, "actor")} 426 | root_cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 427 | response = await ds.client.get(path) 428 | assert response.status_code == 403 429 | # Should deny with someuser cookie 430 | response2 = await ds.client.get("" + path, cookies=someuser_cookies) 431 | assert response2.status_code == 403 432 | # Should allow with root cookies 433 | response3 = await ds.client.get("" + path, cookies=root_cookies) 434 | assert response3.status_code in (200, 302) 435 | 436 | 437 | @pytest.mark.asyncio 438 | @pytest.mark.parametrize( 439 | "rules_allow,should_work", 440 | ( 441 | ( 442 | [ 443 | Rule( 444 | actor_id="user", 445 | action="edit-schema", 446 | database="data", 447 | resource=None, 448 | ), 449 | ], 450 | True, 451 | ), 452 | ( 453 | [ 454 | Rule( 455 | actor_id="user2", 456 | action="edit-schema", 457 | database="data", 458 | resource=None, 459 | ), 460 | ], 461 | False, 462 | ), 463 | ( 464 | [ 465 | Rule( 466 | actor_id="user", 467 | action="create-table", 
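# This pair of cases covers the fallback permission: an actor that lacks
# edit-schema but holds create-table on the database should still be able to
# reach and submit the create-table form.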
468 | database="data", 469 | resource=None, 470 | ), 471 | ], 472 | True, 473 | ), 474 | ( 475 | [ 476 | Rule( 477 | actor_id="user2", 478 | action="create-table", 479 | database="data", 480 | resource=None, 481 | ), 482 | ], 483 | False, 484 | ), 485 | ), 486 | ) 487 | async def test_permission_create_table(permission_plugin, ds, rules_allow, should_work): 488 | ds._rules_allow = rules_allow 489 | cookies = {"ds_actor": ds.sign({"a": {"id": "user"}}, "actor")} 490 | csrftoken_r = await ds.client.get("/-/edit-schema/data/-/create", cookies=cookies) 491 | if not should_work: 492 | assert csrftoken_r.status_code == 403 493 | return 494 | assert csrftoken_r.status_code == 200 495 | csrftoken = csrftoken_r.cookies["ds_csrftoken"] 496 | cookies["ds_csrftoken"] = csrftoken 497 | post_data = { 498 | "primary_key_name": "id", 499 | "primary_key_type": "INTEGER", 500 | "table_name": "foo", 501 | "csrftoken": csrftoken, 502 | } 503 | response = await ds.client.post( 504 | "/-/edit-schema/data/-/create", 505 | data=post_data, 506 | cookies=cookies, 507 | ) 508 | assert response.status_code == 302 509 | 510 | 511 | @pytest.mark.asyncio 512 | @pytest.mark.parametrize( 513 | "rules_allow,should_work", 514 | ( 515 | ( 516 | [ 517 | Rule( 518 | actor_id="user", 519 | action="edit-schema", 520 | database="data", 521 | resource=None, 522 | ), 523 | ], 524 | True, 525 | ), 526 | ( 527 | [ 528 | Rule( 529 | actor_id="user2", 530 | action="edit-schema", 531 | database="data", 532 | resource=None, 533 | ), 534 | ], 535 | False, 536 | ), 537 | ( 538 | [ 539 | Rule( 540 | actor_id="user", 541 | action="alter-table", 542 | database="data", 543 | resource="museums", 544 | ), 545 | ], 546 | True, 547 | ), 548 | ( 549 | [ 550 | Rule( 551 | actor_id="user2", 552 | action="alter-table", 553 | database="data", 554 | resource="museums", 555 | ), 556 | ], 557 | False, 558 | ), 559 | ), 560 | ) 561 | async def test_permission_alter_table(permission_plugin, ds, rules_allow, should_work): 562 | ds._rules_allow = rules_allow 563 | cookies = {"ds_actor": ds.sign({"a": {"id": "user"}}, "actor")} 564 | csrftoken_r = await ds.client.get("/-/edit-schema/data/museums", cookies=cookies) 565 | if not should_work: 566 | assert csrftoken_r.status_code == 403 567 | return 568 | assert csrftoken_r.status_code == 200 569 | csrftoken = csrftoken_r.cookies["ds_csrftoken"] 570 | cookies["ds_csrftoken"] = csrftoken 571 | post_data = { 572 | "action": "update_primary_key", 573 | "primary_key": "name", 574 | "csrftoken": csrftoken, 575 | } 576 | response = await ds.client.post( 577 | "/-/edit-schema/data/museums", 578 | data=post_data, 579 | cookies=cookies, 580 | ) 581 | assert response.status_code == 302 582 | 583 | 584 | @pytest.mark.asyncio 585 | async def test_table_form_contains_schema(permission_plugin, ds): 586 | ds._rules_allow = [ 587 | Rule( 588 | actor_id="user", 589 | action="edit-schema", 590 | database="data", 591 | resource=None, 592 | ), 593 | ] 594 | response = await ds.client.get( 595 | "/-/edit-schema/data/creatures", 596 | cookies={"ds_actor": ds.sign({"a": {"id": "user"}}, "actor")}, 597 | ) 598 | assert response.status_code == 200 599 | assert ( 600 | "CREATE TABLE [creatures]" in response.text 601 | # In case we remove '[' in the future: 602 | or "CREATE TABLE creatures" in response.text 603 | ) 604 | 605 | 606 | @pytest.mark.asyncio 607 | @pytest.mark.parametrize( 608 | "new_name,should_work,expected_message", 609 | [ 610 | ("valid", True, "Table renamed to 'valid'"), 611 | ("]]]", False, 'Error renaming table: 
unrecognized token: "]"'), 612 | ("creatures", True, "Table name was the same"), 613 | ("", False, "New table name is required"), 614 | ("other_table", False, "A table called 'other_table' already exists"), 615 | ], 616 | ) 617 | async def test_rename_table(db_path, new_name, should_work, expected_message): 618 | ds = Datasette([db_path]) 619 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 620 | csrftoken = ( 621 | await ds.client.get("/-/edit-schema/data/creatures", cookies=cookies) 622 | ).cookies["ds_csrftoken"] 623 | before_schema = sqlite_utils.Database(db_path)["creatures"].schema 624 | response = await ds.client.post( 625 | "/-/edit-schema/data/creatures", 626 | data={ 627 | "rename_table": "1", 628 | "name": new_name, 629 | "csrftoken": csrftoken, 630 | }, 631 | cookies=dict(cookies, ds_csrftoken=csrftoken), 632 | ) 633 | assert response.status_code == 302 634 | if should_work: 635 | expected_path = "/-/edit-schema/data/{}".format(new_name) 636 | if expected_message != "Table name was the same": 637 | event = get_last_event(ds) 638 | if event: 639 | assert event.name == "alter-table" 640 | assert event.table == new_name 641 | assert new_name in event.properties()["after_schema"] 642 | assert "creatures" in event.properties()["before_schema"] 643 | 644 | else: 645 | expected_path = "/-/edit-schema/data/creatures" 646 | assert response.headers["location"] == expected_path 647 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 648 | assert len(messages) == 1 649 | assert messages[0][0] == expected_message 650 | if should_work: 651 | # Should have tracked alter-table against the new table name 652 | event = get_last_event(ds) 653 | if expected_message == "Table name was the same": 654 | assert event is None 655 | else: 656 | assert event.name == "alter-table" 657 | assert event.before_schema == before_schema 658 | assert event.after_schema == sqlite_utils.Database(db_path)[new_name].schema 659 | 660 | 661 | @pytest.mark.asyncio 662 | @pytest.mark.parametrize( 663 | "path,expected_breadcrumbs", 664 | ( 665 | ("/-/edit-schema/data", ['home', 'data']), 666 | ( 667 | "/-/edit-schema/data/creatures", 668 | [ 669 | 'home', 670 | 'data', 671 | 'creatures', 672 | ], 673 | ), 674 | ), 675 | ) 676 | async def test_breadcrumbs(db_path, path, expected_breadcrumbs): 677 | ds = Datasette([db_path]) 678 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 679 | response = await ds.client.get(path, cookies=cookies) 680 | assert response.status_code == 200 681 | breadcrumbs = response.text.split('
<p class="crumbs">')[1].split("</p>
")[0] 682 | for crumb in expected_breadcrumbs: 683 | assert crumb in breadcrumbs 684 | 685 | 686 | def test_potential_foreign_keys(db): 687 | potentials = potential_foreign_keys( 688 | db.conn, 689 | "museums", 690 | ["name", "city_id"], 691 | get_primary_keys(db.conn), 692 | ) 693 | assert potentials == {"name": [], "city_id": [("cities", "id")]} 694 | 695 | 696 | @pytest.mark.asyncio 697 | async def test_edit_form_shows_suggestions(db_path): 698 | # Test for suggested foreign keys and primary keys 699 | ds = Datasette([db_path]) 700 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 701 | response = await ds.client.get("/-/edit-schema/data/museums", cookies=cookies) 702 | assert response.status_code == 200 703 | # Should suggest two of the three columns as primary keys 704 | soup = BeautifulSoup(response.text, "html5lib") 705 | assert "
<h2>Change the primary key</h2>
" in response.text 706 | pk_options = get_options(soup, "primary_key") 707 | assert pk_options == [ 708 | {"value": "id", "text": "id (current)", "selected": True}, 709 | {"value": "name", "text": "name", "selected": False}, 710 | ] 711 | 712 | # Test foreign key suggestions 713 | selects = soup.find_all("select", attrs={"name": re.compile("^fk.")}) 714 | select_options = [(s["name"], get_options(soup, s["name"])) for s in selects] 715 | assert select_options == [ 716 | ( 717 | "fk.id", 718 | [ 719 | { 720 | "value": "-- no suggestions --", 721 | "text": "-- no suggestions --", 722 | "selected": False, 723 | }, 724 | {"value": "cities.id", "text": "cities.id", "selected": False}, 725 | { 726 | "value": "distractions.id", 727 | "text": "distractions.id", 728 | "selected": False, 729 | }, 730 | ], 731 | ), 732 | ( 733 | "fk.name", 734 | [ 735 | { 736 | "value": "-- no suggestions --", 737 | "text": "-- no suggestions --", 738 | "selected": False, 739 | }, 740 | {"value": "cities.id", "text": "cities.id", "selected": False}, 741 | { 742 | "value": "distractions.id", 743 | "text": "distractions.id", 744 | "selected": False, 745 | }, 746 | ], 747 | ), 748 | ( 749 | "fk.city_id", 750 | [ 751 | {"value": "-- none --", "text": "-- none --", "selected": False}, 752 | { 753 | "value": "cities.id", 754 | "text": "cities.id (suggested)", 755 | "selected": False, 756 | }, 757 | { 758 | "value": "distractions.id", 759 | "text": "distractions.id", 760 | "selected": False, 761 | }, 762 | ], 763 | ), 764 | ] 765 | 766 | 767 | @pytest.mark.asyncio 768 | async def test_edit_form_for_empty_table(db_path): 769 | # https://github.com/simonw/datasette-edit-schema/issues/38 770 | ds = Datasette([db_path]) 771 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 772 | response = await ds.client.get("/-/edit-schema/data/empty_table", cookies=cookies) 773 | assert response.status_code == 200 774 | 775 | # It shouldn't suggest any foreign keys, since there are no records 776 | assert " (suggested)" not in response.text 777 | 778 | 779 | @pytest.mark.asyncio 780 | @pytest.mark.parametrize( 781 | "table,post_data,expected_fks,expected_pk,expected_message", 782 | ( 783 | # Foreign key edit 784 | ( 785 | "museums", 786 | {"action": "update_foreign_keys", "fk.city_id": "cities.id"}, 787 | [("museums", "city_id", "cities", "id")], 788 | ["id"], 789 | "Foreign keys updated to city_id → cities.id", 790 | ), 791 | # Set the primary key to be a foreign key 792 | ( 793 | "museums", 794 | {"action": "update_foreign_keys", "fk.id": "cities.id"}, 795 | [("museums", "id", "cities", "id")], 796 | ["id"], 797 | "Foreign keys updated to id → cities.id", 798 | ), 799 | # No changes to foreign keys 800 | ( 801 | "museums", 802 | {"action": "update_foreign_keys"}, 803 | [], 804 | ["id"], 805 | "No changes to foreign keys", 806 | ), 807 | # Remove foreign keys 808 | ( 809 | "has_foreign_keys", 810 | {"action": "update_foreign_keys", "fk.distraction_id": ""}, 811 | [], 812 | ["id"], 813 | "Foreign keys removed", 814 | ), 815 | # Point existing foreign key at something else 816 | ( 817 | "has_foreign_keys", 818 | {"action": "update_foreign_keys", "fk.distraction_id": "cities.id"}, 819 | [("has_foreign_keys", "distraction_id", "cities", "id")], 820 | ["id"], 821 | "Foreign keys updated to distraction_id → cities.id", 822 | ), 823 | # Same again for tables with weird characters in their names 824 | ( 825 | "animal.name/with/slashes", 826 | { 827 | "action": "update_foreign_keys", 828 | "fk.species": 
"table~2Ename~2Fwith~2Fslashes~2Ecategories.id", 829 | }, 830 | [ 831 | ( 832 | "animal.name/with/slashes", 833 | "species", 834 | "table.name/with/slashes.categories", 835 | "id", 836 | ) 837 | ], 838 | ["id"], 839 | "Foreign keys updated to species → table.name/with/slashes.categories.id", 840 | ), 841 | # Change primary key in a way that works 842 | ( 843 | "museums", 844 | {"action": "update_primary_key", "primary_key": "name"}, 845 | [], 846 | ["name"], 847 | "Primary key for 'museums' is now 'name'", 848 | ), 849 | # And a way that returns an error 850 | ( 851 | "museums", 852 | {"action": "update_primary_key", "primary_key": "city_id"}, 853 | [], 854 | ["id"], 855 | "Column 'city_id' is not unique", 856 | ), 857 | ), 858 | ) 859 | async def test_edit_keys( 860 | db_path, table, post_data, expected_fks, expected_pk, expected_message 861 | ): 862 | ds = Datasette([db_path]) 863 | # Grab a csrftoken 864 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 865 | csrftoken_r = await ds.client.get( 866 | "/-/edit-schema/data/{}".format(tilde_encode(table)), cookies=cookies 867 | ) 868 | csrftoken = csrftoken_r.cookies["ds_csrftoken"] 869 | cookies["ds_csrftoken"] = csrftoken 870 | post_data["csrftoken"] = csrftoken 871 | response = await ds.client.post( 872 | "/-/edit-schema/data/{}".format(tilde_encode(table)), 873 | data=post_data, 874 | cookies=cookies, 875 | ) 876 | assert response.status_code == 302 877 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 878 | assert len(messages) == 1 879 | assert messages[0][0] == expected_message 880 | db = sqlite_utils.Database(db_path) 881 | assert db[table].foreign_keys == expected_fks 882 | assert db[table].pks == expected_pk 883 | 884 | 885 | def get_options(soup, name): 886 | select = soup.find("select", attrs={"name": name}) 887 | return [ 888 | { 889 | "value": o.get("value") or o.text, 890 | "text": o.text, 891 | "selected": bool(o.get("selected")), 892 | } 893 | for o in select.find_all("option") 894 | ] 895 | 896 | 897 | @pytest.mark.asyncio 898 | @pytest.mark.parametrize( 899 | "post_data,expected_message,expected_schema", 900 | ( 901 | ( 902 | {"primary_key_name": "id", "primary_key_type": "INTEGER"}, 903 | "Table name is required", 904 | None, 905 | ), 906 | ( 907 | { 908 | "primary_key_name": "id", 909 | "primary_key_type": "INTEGER", 910 | "table_name": "museums", 911 | }, 912 | "Table already exists", 913 | None, 914 | ), 915 | ( 916 | { 917 | "primary_key_name": "id", 918 | "primary_key_type": "INTEGER", 919 | "table_name": "foo", 920 | }, 921 | "Table has been created", 922 | {"id": int}, 923 | ), 924 | ( 925 | { 926 | "primary_key_name": "my_pk", 927 | "primary_key_type": "TEXT", 928 | "table_name": "foo", 929 | "column-name.0": "col1_text", 930 | "column-type.0": "TEXT", 931 | "column-sort.0": "2", 932 | "column-name.1": "col2_int", 933 | "column-type.1": "INTEGER", 934 | "column-sort.1": "1", 935 | "column-name.2": "col3_real", 936 | "column-type.2": "REAL", 937 | "column-sort.2": "3", 938 | "column-name.3": "col4_blob", 939 | "column-type.3": "BLOB", 940 | "column-sort.3": "4", 941 | }, 942 | "Table has been created", 943 | { 944 | "my_pk": str, 945 | "col2_int": int, 946 | "col1_text": str, 947 | "col3_real": float, 948 | "col4_blob": bytes, 949 | }, 950 | ), 951 | ), 952 | ) 953 | async def test_create_table(db_path, post_data, expected_message, expected_schema): 954 | ds = Datasette([db_path]) 955 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 956 | csrftoken_r = await 
ds.client.get("/-/edit-schema/data/-/create", cookies=cookies) 957 | csrftoken = csrftoken_r.cookies["ds_csrftoken"] 958 | cookies["ds_csrftoken"] = csrftoken 959 | post_data["csrftoken"] = csrftoken 960 | response = await ds.client.post( 961 | "/-/edit-schema/data/-/create", 962 | data=post_data, 963 | cookies=cookies, 964 | ) 965 | assert response.status_code == 302 966 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 967 | assert len(messages) == 1 968 | assert messages[0][0] == expected_message 969 | if expected_schema is not None: 970 | db = sqlite_utils.Database(db_path) 971 | assert db[post_data["table_name"]].columns_dict == expected_schema 972 | # create-table should have been tracked 973 | event = get_last_event(ds) 974 | if event: 975 | assert event.name == "create-table" 976 | 977 | 978 | def test_examples_for_columns(): 979 | db = sqlite_utils.Database(memory=True) 980 | db["examples"].insert_all( 981 | [ 982 | {"id": 1, "name": "Name 1", "age": 15, "weight": None, "photo's": b"Blob"}, 983 | {"id": 2, "name": None, "age": 25, "weight": 2.3, "photo's": b"Blob2"}, 984 | {"id": 3, "name": "", "age": None, "weight": 2.0, "photo's": b"Blob3"}, 985 | {"id": 4, "name": "Name 4", "age": 18, "weight": 1.7, "photo's": b"Blob4"}, 986 | {"id": 5, "name": "Name 5", "age": 21, "weight": None, "photo's": b"Blob5"}, 987 | {"id": 6, "name": "Name 6", "age": 35, "weight": 2.5, "photo's": b"Blob6"}, 988 | {"id": 7, "name": "Name 7", "age": 28, "weight": 1.9, "photo's": b"Blob7"}, 989 | {"id": 8, "name": "Name 8", "age": 22, "weight": 2.1, "photo's": b"Blob8"}, 990 | {"id": 9, "name": "Name 9", "age": 20, "weight": 1.5, "photo's": b"Blob9"}, 991 | { 992 | "id": 10, 993 | "name": "Name 10", 994 | "age": 40, 995 | "weight": 2.8, 996 | "photo's": b"Blob10", 997 | }, 998 | ] 999 | ) 1000 | examples = examples_for_columns(db.conn, "examples") 1001 | assert examples == { 1002 | "age": ["15", "25", "18", "21", "35"], 1003 | "id": ["1", "2", "3", "4", "5"], 1004 | "name": ["Name 1", "Name 4", "Name 5", "Name 6", "Name 7"], 1005 | "weight": ["2.3", "2.0", "1.7", "2.5", "1.9"], 1006 | } 1007 | 1008 | 1009 | def test_potential_primary_keys(): 1010 | db = sqlite_utils.Database(memory=True) 1011 | db["examples"].insert_all( 1012 | [ 1013 | {"id": 1, "photo's": b"Blob", "cat": "1"}, 1014 | {"id": 2, "photo's": b"Blob2", "cat": "1"}, 1015 | {"id": 3, "photo's": b"Blob3", "cat": "2"}, 1016 | ] 1017 | ) 1018 | potentials = potential_primary_keys(db.conn, "examples", ["id", "photo's", "cat"]) 1019 | assert potentials == ["id", "photo's"] 1020 | 1021 | 1022 | def test_potential_primary_keys_primary_key_only_table(): 1023 | # https://github.com/simonw/datasette-edit-schema/issues/51 1024 | db = sqlite_utils.Database(memory=True) 1025 | db["examples"].insert_all( 1026 | [ 1027 | {"one_id": 1, "two_id": 2}, 1028 | {"one_id": 2, "two_id": 2}, 1029 | ], 1030 | pk=("one_id", "two_id"), 1031 | ) 1032 | potentials = potential_primary_keys(db.conn, "examples", []) 1033 | assert potentials == [] 1034 | 1035 | 1036 | @pytest.mark.asyncio 1037 | @pytest.mark.parametrize( 1038 | "table,post_data,expected_message,expected_indexes", 1039 | ( 1040 | ( 1041 | "museums", 1042 | {"add_index": "1"}, 1043 | "Column name is required", 1044 | [], 1045 | ), 1046 | ( 1047 | "museums", 1048 | {"add_index": "1", "add_index_column": "name"}, 1049 | "Index added on name", 1050 | [{"name": "idx_museums_name", "columns": ["name"], "unique": 0}], 1051 | ), 1052 | ( 1053 | "museums", 1054 | {"add_index": "1", "add_index_column": 
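# The add-index form posts add_index=1 plus an add_index_column naming the
# column and an optional add_index_unique flag; the remaining cases exercise
# the error paths (unknown column, uniqueness violation, dropping a
# nonexistent index).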
"name", "add_index_unique": 1}, 1055 | "Unique index added on name", 1056 | [{"name": "idx_museums_name", "columns": ["name"], "unique": 1}], 1057 | ), 1058 | ( 1059 | "museums", 1060 | {"add_index": "1", "add_index_column": "city", "add_index_unique": 1}, 1061 | "no such column: city", 1062 | [], 1063 | ), 1064 | ( 1065 | "museums", 1066 | {"add_index": "1", "add_index_column": "city_id", "add_index_unique": 1}, 1067 | "UNIQUE constraint failed: museums.city_id", 1068 | [], 1069 | ), 1070 | # Tests for removing an index 1071 | ( 1072 | "has_indexes", 1073 | {"drop_index_bad": "1"}, 1074 | "no such index: bad", 1075 | [ 1076 | {"columns": ["name"], "name": "name_unique_index", "unique": 1}, 1077 | {"columns": ["name"], "name": "name_index", "unique": 0}, 1078 | ], 1079 | ), 1080 | ( 1081 | "has_indexes", 1082 | {"drop_index_name_index": "1"}, 1083 | "Index dropped: name_index", 1084 | [{"columns": ["name"], "name": "name_unique_index", "unique": 1}], 1085 | ), 1086 | ( 1087 | "has_indexes", 1088 | {"drop_index_name_unique_index": "1"}, 1089 | "Index dropped: name_unique_index", 1090 | [{"columns": ["name"], "name": "name_index", "unique": 0}], 1091 | ), 1092 | # Test for table with surprising characters in its name 1093 | ( 1094 | "animal.name/with/slashes", 1095 | {"add_index": "1", "add_index_column": "species"}, 1096 | "Index added on species", 1097 | [ 1098 | { 1099 | "name": "idx_animal.name/with/slashes_species", 1100 | "columns": ["species"], 1101 | "unique": 0, 1102 | } 1103 | ], 1104 | ), 1105 | ), 1106 | ) 1107 | async def test_add_remove_index( 1108 | db_path, table, post_data, expected_message, expected_indexes 1109 | ): 1110 | ds = Datasette([db_path]) 1111 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")} 1112 | get_response = await ds.client.get( 1113 | "/-/edit-schema/data/{}".format(tilde_encode(table)), cookies=cookies 1114 | ) 1115 | assert get_response.status_code == 200 1116 | csrftoken = get_response.cookies["ds_csrftoken"] 1117 | cookies["ds_csrftoken"] = csrftoken 1118 | post_data["csrftoken"] = csrftoken 1119 | response = await ds.client.post( 1120 | "/-/edit-schema/data/{}".format(tilde_encode(table)), 1121 | cookies=cookies, 1122 | data=post_data, 1123 | ) 1124 | assert response.status_code == 302 1125 | messages = ds.unsign(response.cookies["ds_messages"], "messages") 1126 | assert len(messages) == 1 1127 | assert messages[0][0] == expected_message 1128 | db = sqlite_utils.Database(db_path) 1129 | indexes = db[table].indexes 1130 | assert [ 1131 | {"name": index.name, "columns": index.columns, "unique": index.unique} 1132 | for index in indexes 1133 | if "sqlite_autoindex" not in index.name 1134 | ] == expected_indexes 1135 | 1136 | 1137 | @pytest.mark.asyncio 1138 | async def test_database_and_table_level_permissions(tmp_path): 1139 | marketing_path = str(tmp_path / "marketing.db") 1140 | sales_path = str(tmp_path / "sales.db") 1141 | marketing_db = sqlite_utils.Database(marketing_path) 1142 | marketing_db["one"].insert({"id": 1}, pk="id") 1143 | sales_db = sqlite_utils.Database(sales_path) 1144 | sales_db["notes"].insert({"id": 1, "note": "Hello"}, pk="id") 1145 | sales_db["not_allowed"].insert({"id": 1}, pk="id") 1146 | 1147 | ds = Datasette( 1148 | [marketing_path, sales_path], 1149 | config={ 1150 | "databases": { 1151 | "marketing": { 1152 | "permissions": { 1153 | "create-table": {"id": "pelican"}, 1154 | "drop-table": {"id": "pelican"}, 1155 | "alter-table": {"id": "pelican"}, 1156 | } 1157 | }, 1158 | "sales": { 1159 | "tables": { 1160 | 
"notes": {"permissions": {"alter-table": {"id": "pelican"}}} 1161 | } 1162 | }, 1163 | } 1164 | }, 1165 | ) 1166 | 1167 | pelican_cookies = {"ds_actor": ds.sign({"a": {"id": "pelican"}}, "actor")} 1168 | walrus_cookies = {"ds_actor": ds.sign({"a": {"id": "walrus"}}, "actor")} 1169 | 1170 | async def pelican_can_see(path): 1171 | response = await ds.client.get(path, cookies=pelican_cookies) 1172 | return response if response.status_code == 200 else None 1173 | 1174 | async def walrus_can_see(path): 1175 | response = await ds.client.get(path, cookies=walrus_cookies) 1176 | return response if response.status_code == 200 else None 1177 | 1178 | assert await pelican_can_see("/-/edit-schema/marketing/one") 1179 | assert not await walrus_can_see("/-/edit-schema/marketing/one") 1180 | 1181 | # pelican cannot edit sales/not_allowed 1182 | assert not await pelican_can_see("/-/edit-schema/sales/not_allowed") 1183 | assert not await walrus_can_see("/-/edit-schema/sales/not_allowed") 1184 | 1185 | # pelican can edit notes - but not drop or rename it 1186 | response = await pelican_can_see("/-/edit-schema/sales/notes") 1187 | assert response 1188 | assert '' in response.text 1189 | assert 'value="Drop this table">' not in response.text 1190 | assert ' ' not in response.text 1191 | 1192 | # But they can drop table or rename table in marketing/one 1193 | response2 = await pelican_can_see("/-/edit-schema/marketing/one") 1194 | assert response2 1195 | assert 'value="Drop this table">' in response2.text 1196 | assert ' ' in response2.text 1197 | -------------------------------------------------------------------------------- /update-screenshot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Grab the database 4 | wget https://datasette.io/content.db 5 | 6 | # Delete the triggers on the licenses table 7 | sqlite3 content.db "DROP TRIGGER IF EXISTS licenses_ai" 8 | sqlite3 content.db "DROP TRIGGER IF EXISTS licenses_ad" 9 | sqlite3 content.db "DROP TRIGGER IF EXISTS licenses_au" 10 | 11 | # Setup the root plugin 12 | mkdir shot-plugins 13 | cat > shot-plugins/root.py <