├── pytest.ini
├── upload-demo.gif
├── .gitignore
├── .github
│   └── workflows
│       ├── test.yml
│       └── publish.yml
├── setup.py
├── README.md
├── datasette_upload_dbs
│   ├── __init__.py
│   └── templates
│       └── upload_dbs.html
├── tests
│   └── test_upload_dbs.py
└── LICENSE
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | asyncio_mode = strict
3 |
--------------------------------------------------------------------------------
/upload-demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/asg017/datasette-upload-dbs/main/upload-demo.gif
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .venv
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | venv
6 | .eggs
7 | .pytest_cache
8 | *.egg-info
9 | .DS_Store
10 | .vscode
11 | dist
12 | build
13 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | test:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version: ["3.7", "3.8", "3.9", "3.10"]
11 | steps:
12 | - uses: actions/checkout@v2
13 | - name: Set up Python ${{ matrix.python-version }}
14 | uses: actions/setup-python@v2
15 | with:
16 | python-version: ${{ matrix.python-version }}
17 | - uses: actions/cache@v2
18 | name: Configure pip caching
19 | with:
20 | path: ~/.cache/pip
21 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
22 | restore-keys: |
23 | ${{ runner.os }}-pip-
24 | - name: Install dependencies
25 | run: |
26 | pip install -e '.[test]'
27 | - name: Run tests
28 | run: |
29 | pytest
30 |
31 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | import os
3 |
4 | VERSION = "0.2"
5 |
6 |
7 | def get_long_description():
8 | with open(
9 | os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md"),
10 | encoding="utf8",
11 | ) as fp:
12 | return fp.read()
13 |
14 |
15 | setup(
16 | name="datasette-upload-dbs",
17 | description="Upload SQLite database files to Datasette",
18 | long_description=get_long_description(),
19 | long_description_content_type="text/markdown",
20 | author="Simon Willison",
21 | url="https://github.com/simonw/datasette-upload-dbs",
22 | project_urls={
23 | "Issues": "https://github.com/simonw/datasette-upload-dbs/issues",
24 | "CI": "https://github.com/simonw/datasette-upload-dbs/actions",
25 | "Changelog": "https://github.com/simonw/datasette-upload-dbs/releases",
26 | },
27 | license="Apache License, Version 2.0",
28 | classifiers=[
29 | "Framework :: Datasette",
30 | "License :: OSI Approved :: Apache Software License",
31 | ],
32 | version=VERSION,
33 | packages=["datasette_upload_dbs"],
34 | entry_points={"datasette": ["upload_dbs = datasette_upload_dbs"]},
35 | install_requires=["datasette", "starlette"],
36 | extras_require={"test": ["pytest", "pytest-asyncio"]},
37 | package_data={"datasette_upload_dbs": ["templates/*"]},
38 | python_requires=">=3.7",
39 | )
40 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish Python Package
2 |
3 | on:
4 | release:
5 | types: [created]
6 |
7 | jobs:
8 | test:
9 | runs-on: ubuntu-latest
10 | strategy:
11 | matrix:
12 | python-version: ["3.7", "3.8", "3.9", "3.10"]
13 | steps:
14 | - uses: actions/checkout@v2
15 | - name: Set up Python ${{ matrix.python-version }}
16 | uses: actions/setup-python@v2
17 | with:
18 | python-version: ${{ matrix.python-version }}
19 | - uses: actions/cache@v2
20 | name: Configure pip caching
21 | with:
22 | path: ~/.cache/pip
23 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
24 | restore-keys: |
25 | ${{ runner.os }}-pip-
26 | - name: Install dependencies
27 | run: |
28 | pip install -e '.[test]'
29 | - name: Run tests
30 | run: |
31 | pytest
32 | deploy:
33 | runs-on: ubuntu-latest
34 | needs: [test]
35 | steps:
36 | - uses: actions/checkout@v2
37 | - name: Set up Python
38 | uses: actions/setup-python@v2
39 | with:
40 | python-version: "3.10"
41 | - uses: actions/cache@v2
42 | name: Configure pip caching
43 | with:
44 | path: ~/.cache/pip
45 | key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}
46 | restore-keys: |
47 | ${{ runner.os }}-publish-pip-
48 | - name: Install dependencies
49 | run: |
50 | pip install setuptools wheel twine build
51 | - name: Publish
52 | env:
53 | TWINE_USERNAME: __token__
54 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
55 | run: |
56 | python -m build
57 | twine upload dist/*
58 |
59 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # datasette-upload-dbs
2 |
3 | [](https://pypi.org/project/datasette-upload-dbs/)
4 | [](https://github.com/simonw/datasette-upload-dbs/releases)
5 | [](https://github.com/simonw/datasette-upload-dbs/actions?query=workflow%3ATest)
6 | [](https://github.com/simonw/datasette-upload-dbs/blob/main/LICENSE)
7 |
8 | Upload SQLite database files to Datasette
9 |
10 | ## Installation
11 |
12 | Install this plugin in the same environment as Datasette.
13 |
14 | datasette install datasette-upload-dbs
15 |
16 | ## Configuration
17 |
18 | This plugin requires you to configure a directory in which uploaded files will be stored.
19 |
20 | On startup, Datasette will automatically load any SQLite database files (`*.db`) that it finds in that directory. This means it is safe to restart your server in between file uploads.
21 |
22 | To configure the directory as `/home/datasette/uploads`, add this to a `metadata.yml` configuration file:
23 |
24 | ```yaml
25 | plugins:
26 | datasette-upload-dbs:
27 | directory: /home/datasette/uploads
28 | ```
29 |
30 | Or if you are using `metadata.json`:
31 |
32 | ```json
33 | {
34 | "plugins": {
35 | "datasette-upload-dbs": {
36 | "directory": "/home/datasette/uploads"
37 | }
38 | }
39 | }
40 | ```
41 | You can use `"."` for the folder the server is started in, or a relative path such as `"uploads"` for a folder inside it. The folder will be created on startup if it does not already exist.
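
For example, to keep uploads in an `uploads/` folder relative to the directory the server starts in (an illustrative path, not a requirement):

```yaml
plugins:
  datasette-upload-dbs:
    directory: uploads
```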
42 |
43 | Then start Datasette like this:
44 |
45 | datasette -m metadata.yml
46 |
47 | By default the plugin loads every `*.db` file it finds in the configured directory on startup.
48 |
49 | You can disable this behavior by adding the following setting to the plugin configuration:
50 | ```
51 | "skip_startup_scan": true
52 | ```
53 | ## Usage
54 |
55 | Only users with the `upload-dbs` permission will be able to upload files. The `root` user has this permission by default; other users can be granted access using permission plugins - see the [Permissions](https://docs.datasette.io/en/stable/authentication.html#permissions) documentation for details.
56 |
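As an illustration of granting the permission to another user, a minimal one-off plugin loaded via `--plugins-dir` could implement the `permission_allowed` hook (the same hook this plugin uses for `root`); the actor id `alice` and the file name below are made-up examples:

```python
# plugins/allow_alice_uploads.py - hypothetical one-off plugin granting
# the upload-dbs permission to the actor with id "alice"
from datasette import hookimpl


@hookimpl
def permission_allowed(actor, action):
    # Returning True grants the permission; returning None defers to other plugins
    if action == "upload-dbs" and actor and actor.get("id") == "alice":
        return True
```

Starting Datasette with `datasette -m metadata.yml --plugins-dir=plugins/` would load it alongside this plugin.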
57 | To start Datasette as the root user, run this:
58 |
59 | datasette -m metadata.yml --root
60 |
61 | Then follow the link that is displayed on the console.
62 |
63 | If a user has that permission they will see an "Upload database" link in the navigation menu.
64 |
65 | This will take them to `/-/upload-dbs`, where they can upload database files by selecting them or by dragging them onto the drop area.
66 |
67 | 
68 |
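The upload form posts to the same `/-/upload-dbs` endpoint, so uploads can also be scripted. Here is a rough sketch using `httpx`, assuming a server at `http://localhost:8001` and an existing signed `ds_actor` cookie for a user with the `upload-dbs` permission (both placeholders, as is the `my-data.db` file):

```python
# Sketch only: upload a SQLite file to a running Datasette instance.
import httpx

BASE_URL = "http://localhost:8001"  # assumed instance URL
AUTH_COOKIES = {"ds_actor": "..."}  # obtained from your authentication setup

with httpx.Client(base_url=BASE_URL, cookies=AUTH_COOKIES) as client:
    # Visiting the form first sets the ds_csrftoken cookie needed for the POST
    csrftoken = client.get("/-/upload-dbs").cookies["ds_csrftoken"]
    response = client.post(
        "/-/upload-dbs",
        data={"csrftoken": csrftoken, "db_name": "my-data", "xhr": "1"},
        files={"db": open("my-data.db", "rb")},
    )
    print(response.json())  # {"ok": True, "redirect": "/my-data"} on success
```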
69 | ## Development
70 |
71 | To set up this plugin locally, first check out the code. Then create a new virtual environment:
72 |
73 | cd datasette-upload-dbs
74 | python3 -m venv venv
75 | source venv/bin/activate
76 |
77 | Now install the dependencies and test dependencies:
78 |
79 | pip install -e '.[test]'
80 |
81 | To run the tests:
82 |
83 | pytest
84 |
--------------------------------------------------------------------------------
/datasette_upload_dbs/__init__.py:
--------------------------------------------------------------------------------
1 | from datasette import hookimpl
2 | from datasette.database import Database
3 | from datasette.utils.asgi import Response, Forbidden
4 | from datasette.utils import to_css_class
5 | from datasette.utils.sqlite import sqlite3
6 | from starlette.requests import Request
7 | from shutil import copyfileobj
8 | import pathlib
9 |
10 |
11 | @hookimpl
12 | def permission_allowed(actor, action):
13 | if action == "upload-dbs" and actor and actor.get("id") == "root":
14 | return True
15 |
16 |
17 | @hookimpl
18 | def register_routes():
19 | return [
20 | (r"^/-/upload-dbs$", upload_dbs),
21 | (r"^/-/upload-db$", lambda: Response.redirect("/-/upload-dbs")),
22 | ]
23 |
24 |
25 | @hookimpl
26 | def menu_links(datasette, actor):
27 | async def inner():
28 | if await datasette.permission_allowed(
29 | actor, "upload-dbs", default=False
30 | ) and _configured(datasette):
31 | return [
32 | {
33 | "href": datasette.urls.path("/-/upload-dbs"),
34 | "label": "Upload database",
35 | },
36 | ]
37 |
38 | return inner
39 |
40 |
41 | @hookimpl
42 | def startup(datasette):
43 |     # Load any databases found in the configured directory
44 | config = datasette.plugin_config("datasette-upload-dbs") or {}
45 | if config.get("skip_startup_scan"):
46 | return
47 | directory = config.get("directory")
48 | if not directory:
49 | return
50 | path = pathlib.Path(directory)
51 | database_files = path.glob("*.db")
52 | for file_path in database_files:
53 |         # Set is_mutable=True explicitly because the default was False
54 |         # in Datasette versions up to and including 0.62
55 | datasette.add_database(
56 | Database(datasette, path=str(file_path), is_mutable=True),
57 | )
58 |
59 |
60 | def _configured(datasette):
61 | return (datasette.plugin_config("datasette-upload-dbs") or {}).get("directory")
62 |
63 |
64 | async def upload_dbs(scope, receive, datasette, request):
65 | if not await datasette.permission_allowed(
66 | request.actor, "upload-dbs", default=False
67 | ):
68 | raise Forbidden("Permission denied for upload-dbs")
69 |
70 | directory = _configured(datasette)
71 |
72 | if not directory:
73 | raise Forbidden("datasette-upload-dbs plugin has not been correctly configured")
74 |
75 | path = pathlib.Path(directory)
76 |
77 | if request.method != "POST":
78 | return Response.html(
79 | await datasette.render_template("upload_dbs.html", request=request)
80 | )
81 |
82 | # We use the Starlette request object to handle file uploads
83 | starlette_request = Request(scope, receive)
84 |
85 | async def error(msg):
86 | if is_xhr:
87 | return Response.json({"ok": False, "error": msg})
88 |
89 | return Response.html(
90 | await datasette.render_template(
91 | "upload_dbs.html",
92 | {
93 | "error": msg,
94 | },
95 | request=request,
96 | )
97 | )
98 |
99 | formdata = await starlette_request.form()
100 | db_file = formdata["db"]
101 | is_xhr = formdata.get("xhr")
102 | db_name = (formdata.get("db_name") or "").strip()
103 |
104 | if not db_name:
105 | db_name = db_file.filename.split(".")[0]
106 |
107 | db_name = to_css_class(db_name) or "db"
108 |
109 | first_16 = db_file.file.read(16)
110 | if first_16 != b"SQLite format 3\x00":
111 | return await error("File is not a valid SQLite database (invalid header)")
112 |
113 | path.mkdir(parents=True, exist_ok=True)
114 |
115 | # Copy it to its final destination
116 | filepath = path / (db_name + ".db")
117 | with open(filepath, "wb+") as target_file:
118 | db_file.file.seek(0)
119 | copyfileobj(db_file.file, target_file)
120 |
121 | # Now really verify it
122 | conn = sqlite3.connect(str(filepath))
123 | try:
124 | conn.execute("select * from sqlite_master")
125 | except sqlite3.Error as e:
126 | # Delete file, it is invalid
127 | filepath.unlink()
128 | return await error(f"File is not a valid SQLite database ({e})")
129 |
130 | # File is valid - add it to this Datasette instance
131 | db = Database(datasette, path=str(filepath), is_mutable=True)
132 | datasette.add_database(db)
133 |
134 | redirect_url = datasette.urls.database(db.name)
135 | if is_xhr:
136 | return Response.json({"ok": True, "redirect": redirect_url})
137 | else:
138 | return Response.redirect(redirect_url)
139 |
--------------------------------------------------------------------------------
/datasette_upload_dbs/templates/upload_dbs.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block title %}Upload a database{% endblock %}
4 |
5 | {% block extra_head %}
6 |
45 | {% endblock %}
46 |
47 | {% block content %}
48 | <h1>Upload a database</h1>
49 |
50 | {% if error %}
51 | {{ error }}
52 | {% endif %}
53 |
54 |
64 |
65 |
66 |
67 |
176 | {% endblock %}
177 |
--------------------------------------------------------------------------------
/tests/test_upload_dbs.py:
--------------------------------------------------------------------------------
1 | from datasette.app import Datasette
2 | import sqlite3
3 | import pytest
4 | from io import BytesIO
5 |
6 |
7 | @pytest.mark.asyncio
8 | @pytest.mark.parametrize("auth", [True, False])
9 | async def test_menu(auth):
10 | ds = Datasette(
11 | memory=True, metadata={"plugins": {"datasette-upload-dbs": {"directory": "."}}}
12 | )
13 | cookies = {}
14 | if auth:
15 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")}
16 | response = await ds.client.get("/", cookies=cookies)
17 | assert response.status_code == 200
18 | if auth:
19 | assert "/-/upload-dbs" in response.text
20 | else:
21 | assert "/-/upload-dbs" not in response.text
22 |
23 |
24 | @pytest.mark.asyncio
25 | async def test_redirect():
26 | ds = Datasette(memory=True)
27 | response = await ds.client.get("/-/upload-db")
28 | assert response.status_code == 302
29 | assert response.headers["location"] == "/-/upload-dbs"
30 |
31 |
32 | @pytest.mark.asyncio
33 | @pytest.mark.parametrize("skip_startup_scan", (False, True))
34 | async def test_databases_loaded_on_startup(tmp_path_factory, skip_startup_scan):
35 | uploads_directory = tmp_path_factory.mktemp("uploads")
36 | for name in ("test1.db", "test2.db"):
37 | db_path = uploads_directory / name
38 | sqlite3.connect(str(db_path)).execute("create table t (id integer primary key)")
39 | config = {"directory": str(uploads_directory)}
40 | if skip_startup_scan:
41 | config["skip_startup_scan"] = True
42 |
43 | ds = Datasette(
44 | memory=True,
45 | metadata={"plugins": {"datasette-upload-dbs": config}},
46 | )
47 | await ds.invoke_startup()
48 | db_names = {"test1", "test2"}
49 | if skip_startup_scan:
50 | # Should not have any DBs
51 | assert set(ds.databases.keys()) == {"_internal", "_memory"}
52 | else:
53 | assert set(ds.databases.keys()).issuperset(db_names)
54 | for name in db_names:
55 | assert ds.databases[name].is_mutable
56 |
57 |
58 | @pytest.mark.asyncio
59 | @pytest.mark.parametrize(
60 | "authed,configured,expected_error",
61 | (
62 | (False, False, "Permission denied for upload-dbs"),
63 | (False, True, "Permission denied for upload-dbs"),
64 | (True, False, "datasette-upload-dbs plugin has not been correctly configured"),
65 | ),
66 | )
67 | async def test_errors(authed, configured, expected_error):
68 | ds = Datasette(
69 | memory=True,
70 | metadata={
71 | "plugins": {"datasette-upload-dbs": {"directory": "."}}
72 | if configured
73 | else {}
74 | },
75 | )
76 | cookies = {}
77 | if authed:
78 | cookies = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")}
79 | response = await ds.client.get("/-/upload-dbs", cookies=cookies)
80 | assert response.status_code == 403
81 | assert expected_error in response.text
82 |
83 |
84 | @pytest.mark.asyncio
85 | @pytest.mark.parametrize(
86 | "bytes,expected_error",
87 | (
88 | (b"bad_bytes", "File is not a valid SQLite database (invalid header)"),
89 | (
90 | b"SQLite format 3\x00invalid",
91 | "File is not a valid SQLite database (file is not a database)",
92 | ),
93 | ),
94 | )
95 | @pytest.mark.parametrize("xhr", (True, False))
96 | async def test_invalid_files(tmp_path_factory, bytes, expected_error, xhr):
97 | uploads_directory = tmp_path_factory.mktemp("uploads")
98 | ds = Datasette(
99 | memory=True,
100 | metadata={
101 | "plugins": {"datasette-upload-dbs": {"directory": str(uploads_directory)}}
102 | },
103 | )
104 | csrftoken = await _get_csrftoken(ds)
105 |     # Attempt to upload an invalid file
106 | response = await ds.client.post(
107 | "/-/upload-dbs",
108 | cookies={
109 | "ds_actor": ds.sign({"a": {"id": "root"}}, "actor"),
110 | "ds_csrftoken": csrftoken,
111 | },
112 | data={"csrftoken": csrftoken, "xhr": "1" if xhr else ""},
113 | files={"db": BytesIO(bytes)},
114 | )
115 | if xhr:
116 | assert response.json() == {"ok": False, "error": expected_error}
117 | else:
118 |         assert f'{expected_error}' in response.text
119 |
120 |
121 | @pytest.mark.asyncio
122 | @pytest.mark.parametrize("xhr", (True, False))
123 | @pytest.mark.parametrize(
124 | "db_file_name,db_name,expected_path",
125 | (
126 | ("temp.db", None, "/temp"),
127 | (".db", None, "/d41d8c"),
128 | ("temp.db", "custom", "/custom"),
129 | ("temp.db", "a + b + c ~ d", "/a--b--c--d-26e27e"),
130 | ),
131 | )
132 | async def test_upload(tmp_path_factory, xhr, db_file_name, db_name, expected_path):
133 | uploads_directory = tmp_path_factory.mktemp("uploads")
134 | tmp_directory = tmp_path_factory.mktemp("tmp")
135 | ds = Datasette(
136 | memory=True,
137 | metadata={
138 | "plugins": {"datasette-upload-dbs": {"directory": str(uploads_directory)}}
139 | },
140 | )
141 | csrftoken = await _get_csrftoken(ds)
142 |
143 | temp = str(tmp_directory / db_file_name)
144 | sqlite3.connect(temp).execute("create table t (id integer primary key)")
145 |
146 | upload_data = {"csrftoken": csrftoken, "xhr": "1" if xhr else ""}
147 | if db_name:
148 | upload_data["db_name"] = db_name
149 |
150 | response = await ds.client.post(
151 | "/-/upload-dbs",
152 | cookies={
153 | "ds_actor": ds.sign({"a": {"id": "root"}}, "actor"),
154 | "ds_csrftoken": csrftoken,
155 | },
156 | data=upload_data,
157 | files={"db": open(temp, "rb")},
158 | )
159 | if xhr:
160 | assert response.json() == {"ok": True, "redirect": expected_path}
161 | else:
162 | assert response.status_code == 302
163 | assert response.headers["location"] == expected_path
164 |
165 | # Datasette should serve that file
166 | table_response = await ds.client.get(f"{expected_path}/t.json?_shape=array")
167 | assert table_response.status_code == 200
168 | assert table_response.json() == []
169 |
170 |     # Uploaded file should exist in the uploads directory
171 |     conn = sqlite3.connect(str(uploads_directory / (expected_path.lstrip("/") + ".db")))
172 |     assert conn.execute("select sql from sqlite_master").fetchall() == [
173 |         ("create table t (id integer primary key)",)
174 |     ]
175 | # And it should be mutable
176 | new_db = [db for db in ds.databases.values() if not db.name.startswith("_")][0]
177 | assert new_db.is_mutable
178 |
179 |
180 | async def _get_csrftoken(ds):
181 | return (
182 | await ds.client.get(
183 | "/-/upload-dbs",
184 | cookies={"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")},
185 | )
186 | ).cookies["ds_csrftoken"]
187 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------