11 | {% endif %}
12 | {% endblock %}
13 | {# do not display relbars #}
14 | {% block relbar1 %}{% endblock %}
15 | {% block relbar2 %}
16 | {% if theme_github_fork %}
17 |
19 | {% endif %}
20 | {% endblock %}
21 | {% block sidebar1 %}{% endblock %}
22 | {% block sidebar2 %}{% endblock %}
23 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on: [push, pull_request]
3 | jobs:
4 | test:
5 | strategy:
6 | matrix:
7 | python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
8 | fail-fast: false
9 | name: Python ${{ matrix.python-version }}
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v3
13 | - uses: actions/setup-python@v4
14 | with:
15 | python-version: ${{ matrix.python-version }}
16 | - name: Install requirements (Python 3)
17 | run: pip install -r requirements-dev.txt && pip install .
18 | - name: Run tests
19 | run: pytest --cov=ckanserviceprovider --cov-append --cov-report=xml --disable-warnings tests
20 | - name: Upload coverage report to codecov
21 | uses: codecov/codecov-action@v3
22 | with:
23 | file: ./coverage.xml
24 |
--------------------------------------------------------------------------------
/example/jobs.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import ckanserviceprovider.job as job
4 | import ckanserviceprovider.util as util
5 |
6 |
7 | @job.synchronous
8 | def echo(task_id, input):
9 | if input["data"].startswith(">"):
10 | raise util.JobError("do not start message with >")
11 | if input["data"].startswith("#"):
12 | raise Exception("serious exception")
13 | return ">" + input["data"]
14 |
15 |
16 | @job.asynchronous
17 | def async_echo(task_id, input):
18 | if input["data"].startswith(">"):
19 | raise util.JobError("do not start message with >")
20 | if input["data"].startswith("#"):
21 | raise Exception("serious exception")
22 | return ">" + input["data"]
23 |
24 |
25 | @job.asynchronous
26 | def async_ping(task_id, input):
27 | handler = util.StoringHandler(task_id, input)
28 | logger = logging.getLogger(__name__)
29 | logger.addHandler(handler)
30 |
31 |     logger.warning("ping")
32 | return "ping"
33 |
--------------------------------------------------------------------------------
/example/main.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import ckanserviceprovider.web as web
4 |
5 | import jobs
6 |
7 | # check whether jobs have been imported properly
8 | assert jobs.echo
9 |
10 |
11 | def serve():
12 | web.init()
13 | web.app.run(web.app.config.get("HOST"), web.app.config.get("PORT"))
14 |
15 |
16 | def serve_test():
17 | web.init()
18 | return web.app.test_client()
19 |
20 |
21 | def main():
22 | import argparse
23 |
24 | argparser = argparse.ArgumentParser(
25 | description="Example service",
26 | epilog='''"For a moment, nothing happened.
27 | Then, after a second or so, nothing continued to happen."''',
28 | )
29 |
30 | argparser.add_argument(
31 | "config",
32 | metavar="CONFIG",
33 | type=argparse.FileType("r"),
34 | help="configuration file",
35 | )
36 | args = argparser.parse_args()
37 |
38 | os.environ["JOB_CONFIG"] = os.path.abspath(args.config.name)
39 | serve()
40 |
41 |
42 | if __name__ == "__main__":
43 | main()
44 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | CKAN Service Provider
2 | =====================
3 |
4 | A simple Flask app that makes functions available as synchronous or asynchronous jobs.
5 |
6 | Routes
7 | ------
8 |
9 | .. autoflask:: ckanserviceprovider.web:app
10 | :undoc-static:
11 | :include-empty-docstring:
12 |
13 | Administration
14 | --------------
15 |
16 | To view the results of a job or to resubmit it, you need the job key that is returned when the
17 | job is created. Alternatively, you can log in as the admin user or provide the secure key. The
18 | credentials for the admin user and the secure key are stored in the settings file.
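
For example, to get the status or output data of a job, pass its job key in the
``Authorization`` header of the HTTP request (a sketch: host, port and job id are
placeholders, and the route shown is assumed from the routes documented above)::

    curl -H "Authorization: <job_key>" http://localhost:<port>/job/<job_id>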
19 |
20 | Add a job
21 | ---------
22 |
23 | Just decorate your function and it will become available as a job::
24 |
25 |     import logging
26 |     import ckanserviceprovider.job as job
27 |     import ckanserviceprovider.util as util
28 | @job.synchronous
29 | def echo(task_id, input):
30 | handler = util.StoringHandler(task_id, input)
31 | logger = logging.getLogger(__name__)
32 | logger.addHandler(handler)
33 |
34 | if input['data'].startswith('>'):
35 | raise util.JobError('do not start message with >')
36 | if input['data'].startswith('#'):
37 | raise Exception('serious exception')
38 | if input['data'].startswith('&'):
39 |             logger.warning('just a warning')
40 | return '>' + input['data']
41 |
42 | Expected job errors should be raised as ``util.JobError``. For logging, use the
43 | ``util.StoringHandler`` handler so that the log records are saved to the database.
44 |
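Asynchronous jobs are registered in the same way with the ``@job.asynchronous``
decorator, for example (mirroring ``example/jobs.py``)::

    @job.asynchronous
    def async_echo(task_id, input):
        if input['data'].startswith('>'):
            raise util.JobError('do not start message with >')
        return '>' + input['data']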
45 |
--------------------------------------------------------------------------------
/tests/test_db.py:
--------------------------------------------------------------------------------
1 | """Unit tests for ckanserviceprovider/db.py."""
2 | import pytest
3 |
4 | import ckanserviceprovider.db as db
5 |
6 |
7 | def test_validate_error_with_none():
8 | """_validate_error() should return None if given None."""
9 | assert db._validate_error(None) is None
10 |
11 |
12 | def test_validate_error_with_string():
13 | """If given a string _validate_error() should return it wrapped in a dict."""
14 | assert db._validate_error("Something went wrong") == {
15 | "message": "Something went wrong"
16 | }
17 |
18 |
19 | def test_validate_error_with_valid_dict():
20 | """If given a valid dict _validate_error() should return the same dict."""
21 | job_dict = {"message": "Something went wrong"}
22 | assert db._validate_error(job_dict) == job_dict
23 |
24 |
25 | def test_validate_error_with_dict_with_invalid_error():
26 | """_validate_error() should raise if given a dict with an invalid message."""
27 | job_dict = {"message": 42} # Error message is invalid: it's not a string.
28 | with pytest.raises(db.InvalidErrorObjectError):
29 | db._validate_error(job_dict)
30 |
31 |
32 | def test_validate_error_with_dict_with_no_error_key():
33 | """_validate_error() should raise if given a dict with no "message" key."""
34 | job_dict = {"foo": "bar"}
35 | with pytest.raises(db.InvalidErrorObjectError):
36 | db._validate_error(job_dict)
37 |
38 |
39 | def test_validate_error_with_random_object():
40 | """_validate_error() should raise if given an object of the wrong type."""
41 |
42 | class Foo(object):
43 | pass
44 |
45 | # An error object that is not None and is not string- or dict-like at all.
46 | error_obj = Foo()
47 |
48 | with pytest.raises(db.InvalidErrorObjectError):
49 | db._validate_error(error_obj)
50 |
--------------------------------------------------------------------------------
/doc/_themes/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2010 by Armin Ronacher.
2 |
3 | Some rights reserved.
4 |
5 | Redistribution and use in source and binary forms of the theme, with or
6 | without modification, are permitted provided that the following conditions
7 | are met:
8 |
9 | * Redistributions of source code must retain the above copyright
10 | notice, this list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above
13 | copyright notice, this list of conditions and the following
14 | disclaimer in the documentation and/or other materials provided
15 | with the distribution.
16 |
17 | * The names of the contributors may not be used to endorse or
18 | promote products derived from this software without specific
19 | prior written permission.
20 |
21 | We kindly ask you to only use these themes in an unmodified manner just
22 | for Flask and Flask-related products, not for unrelated projects. If you
23 | like the visual style and want to use it for your own projects, please
24 | consider making some larger changes to the themes (such as changing
25 | font faces, sizes, colors or margins).
26 |
27 | THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
28 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
31 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
32 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
33 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
34 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
35 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
36 | ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE
37 | POSSIBILITY OF SUCH DAMAGE.
38 |
--------------------------------------------------------------------------------
/ckanserviceprovider/util.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 |
4 | import logging
5 | import datetime
6 | from future.utils import python_2_unicode_compatible
7 |
8 | from . import db
9 |
10 |
11 | @python_2_unicode_compatible
12 | class JobError(Exception):
13 | """The exception type that jobs raise to signal failure."""
14 |
15 | def __init__(self, message):
16 | """Initialize a JobError with the given error message string.
17 |
18 | The error message string that you give here will be returned to the
19 | client site in the job dict's "error" key.
20 |
21 | """
22 | self.message = message
23 |
24 | def as_dict(self):
25 | """Return a dictionary representation of this JobError object.
26 |
27 | Returns a dictionary with a "message" key whose value is a string error
28 | message - suitable for use as the "error" key in a ckanserviceprovider
29 | job dict.
30 |
31 | """
32 | return {"message": self.message}
33 |
34 | def __str__(self):
35 | return self.message
36 |
37 |
38 | class StoringHandler(logging.Handler):
39 | """A handler that stores the logging records
40 | in the database."""
41 |
42 | def __init__(self, task_id, input):
43 | logging.Handler.__init__(self)
44 | self.task_id = task_id
45 | self.input = input
46 |
47 | def emit(self, record):
48 | conn = db.ENGINE.connect()
49 | try:
50 | # Turn strings into unicode to stop SQLAlchemy
51 | # "Unicode type received non-unicode bind param value" warnings.
52 | message = str(record.getMessage())
53 | level = str(record.levelname)
54 | module = str(record.module)
55 | funcName = str(record.funcName)
56 |
57 | conn.execute(
58 | db.LOGS_TABLE.insert().values(
59 | job_id=self.task_id,
60 | timestamp=datetime.datetime.now(),
61 | message=message,
62 | level=level,
63 | module=module,
64 | funcName=funcName,
65 | lineno=record.lineno,
66 | )
67 | )
68 | finally:
69 | conn.close()
70 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://github.com/ckan/ckan-service-provider/actions/workflows/test.yml)
2 | [](https://pypi.python.org/pypi/ckanserviceprovider/)
3 | [](https://pypi.python.org/pypi/ckanserviceprovider/)
4 | [](https://pypi.python.org/pypi/ckanserviceprovider/)
5 | [](https://pypi.python.org/pypi/ckanserviceprovider/)
6 | [](https://pypi.python.org/pypi/ckanserviceprovider/)
7 |
8 | [DataPusher]: https://github.com/okfn/datapusher
9 | [PyPI]: https://pypi.python.org/pypi/ckanserviceprovider
10 |
11 |
12 | # CKAN Service Provider
13 |
14 | A library for making web services that make functions available as synchronous
15 | or asynchronous jobs. Used by [DataPusher][].
16 |
17 |
18 | ## Getting Started
19 |
20 | To install ckanserviceprovider for development:
21 |
22 | ```bash
23 | git clone https://github.com/ckan/ckan-service-provider.git
24 | cd ckan-service-provider
25 | pip install -r requirements-dev.txt
26 | ```
27 |
28 | To get started making a web service with ckanserviceprovider, have a look at
29 | [/example](example). You can run the example server with
30 | `python example/main.py example/settings_local.py`.
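
A job is just a decorated function; here is a minimal sketch based on
[example/jobs.py](example/jobs.py):

```python
import ckanserviceprovider.job as job
import ckanserviceprovider.util as util


@job.synchronous
def echo(task_id, input):
    # Raise util.JobError for expected, user-facing failures.
    if input["data"].startswith(">"):
        raise util.JobError("do not start message with >")
    return ">" + input["data"]
```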
31 |
32 | For a real-world example have a look at [DataPusher][].
33 |
34 |
35 | ## Running the Tests
36 |
37 | To run the ckanserviceprovider tests:
38 |
39 | ```bash
40 | pytest
41 | ```
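
The CI workflow ([.github/workflows/test.yml](.github/workflows/test.yml)) runs them
with coverage enabled:

```bash
pytest --cov=ckanserviceprovider --cov-append --cov-report=xml --disable-warnings tests
```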
42 |
43 |
44 | ## Building the Documentation
45 |
46 | To build the ckanserviceprovider docs:
47 |
48 | ```bash
49 | python setup.py build_sphinx
50 | ```
51 |
52 |
53 | ## Releasing a New Version
54 |
55 | To release a new version of ckanserviceprovider:
56 |
57 | 1. Increment the version number in [setup.py](setup.py)
58 |
59 | 2. Build a source distribution of the new version and publish it to
60 | [PyPI][]:
61 |
62 | ```bash
63 | python setup.py sdist bdist_wheel
64 | pip install --upgrade twine
65 | twine upload dist/*
66 | ```
67 |
68 | You may want to test installing and running the new version from PyPI in a
69 | clean virtualenv before continuing to the next step.
70 |
71 | 3. Commit your setup.py changes to git, tag the release, and push the changes
72 | and the tag to GitHub:
73 |
74 | ```bash
75 | git commit setup.py -m "Bump version number"
76 | git tag 0.0.1
77 | git push
78 | git push origin 0.0.1
79 | ```
80 |
81 | (Replace both instances of 0.0.1 with the number of the version you're
82 | releasing.)
83 |
84 |
85 | ## Authors
86 |
87 | The original authors of ckanserviceprovider were
88 | David Raznick and Dominik Moritz.
89 | For the current list of contributors see
90 | [github.com/ckan/ckan-service-provider/contributors](https://github.com/ckan/ckan-service-provider/contributors).
91 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages # Always prefer setuptools over distutils
2 | from codecs import open # To use a consistent encoding
3 | from os import path
4 | import sys
5 |
6 |
7 | here = path.abspath(path.dirname(__file__))
8 |
9 | # Get the long description from the relevant file
10 | with open(path.join(here, "README.md"), encoding="utf-8") as f:
11 | long_description = f.read()
12 |
13 | install_requires = [
14 | "APScheduler>=2.1.2,<3.10.0",
15 | "Flask>=1.1.1,<3.0.0",
16 | "Werkzeug>=1.0.0,<3.0.0",
17 | "SQLAlchemy>=1.3.15,<1.4.0",
18 | "requests>=2.23.0",
19 | "future",
20 | ]
21 |
22 | if sys.version_info < (3, 7):
23 | install_requires.append("flask-login==0.5.0")
24 | else:
25 | install_requires.append("flask-login==0.6.2")
26 |
27 |
28 | setup(
29 | name="ckanserviceprovider",
30 | # Versions should comply with PEP440. For a discussion on single-sourcing
31 | # the version across setup.py and the project code, see
32 | # http://packaging.python.org/en/latest/tutorial.html#version
33 | version="1.2.0",
34 | description="A library for making web services that make functions available as synchronous or asynchronous jobs",
35 | long_description=long_description,
36 | long_description_content_type="text/markdown",
37 | # The project's main homepage.
38 | url="https://github.com/ckan/ckan-service-provider",
39 | # Choose your license
40 | license="AGPL",
41 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
42 | classifiers=[
43 | # How mature is this project? Common values are
44 | # 3 - Alpha
45 | # 4 - Beta
46 | # 5 - Production/Stable
47 | "Development Status :: 5 - Production/Stable",
48 | # Pick your license as you wish (should match "license" above)
49 | "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
50 | # Specify the Python versions you support here. In particular, ensure
51 | # that you indicate whether you support Python 2, Python 3 or both.
52 | "Programming Language :: Python :: 3.7",
53 | "Programming Language :: Python :: 3.8",
54 | "Programming Language :: Python :: 3.9",
55 | "Programming Language :: Python :: 3.10",
56 | "Programming Language :: Python :: 3.11",
57 | ],
58 | # What does your project relate to?
59 | keywords="ckan",
60 | # You can just specify the packages manually here if your project is
61 | # simple. Or you can use find_packages().
62 | packages=find_packages(exclude=["doc", "tests*"]),
63 | # List run-time dependencies here. These will be installed by pip when your
64 | # project is installed. For an analysis of "install_requires" vs pip's
65 | # requirements files see:
66 | # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
67 | install_requires=install_requires,
68 | # If there are data files included in your packages that need to be
69 | # installed, specify them here. If using Python 2.6 or less, then these
70 | # have to be included in MANIFEST.in as well.
71 | package_data={},
72 | # Although 'package_data' is the preferred approach, in some case you may
73 | # need to place data files outside of your packages.
74 | # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
75 | # In this case, 'data_file' will be installed into '/my_data'
76 | data_files=[],
77 | # To provide executable scripts, use entry points in preference to the
78 | # "scripts" keyword. Entry points provide cross-platform support and allow
79 | # pip to create the appropriate form of executable for the target platform.
80 | entry_points={},
81 | )
82 |
--------------------------------------------------------------------------------
/doc/_themes/flask_theme_support.py:
--------------------------------------------------------------------------------
1 | # flasky extensions. flasky pygments style based on tango style
2 | from pygments.style import Style
3 | from pygments.token import (
4 | Keyword,
5 | Name,
6 | Comment,
7 | String,
8 | Error,
9 | Number,
10 | Operator,
11 | Generic,
12 | Whitespace,
13 | Punctuation,
14 | Other,
15 | Literal,
16 | )
17 |
18 |
19 | class FlaskyStyle(Style):
20 | background_color = "#f8f8f8"
21 | default_style = ""
22 |
23 | styles = {
24 | # No corresponding class for the following:
25 | # Text: "", # class: ''
26 | Whitespace: "underline #f8f8f8", # class: 'w'
27 | Error: "#a40000 border:#ef2929", # class: 'err'
28 | Other: "#000000", # class 'x'
29 | Comment: "italic #8f5902", # class: 'c'
30 | Comment.Preproc: "noitalic", # class: 'cp'
31 | Keyword: "bold #004461", # class: 'k'
32 | Keyword.Constant: "bold #004461", # class: 'kc'
33 | Keyword.Declaration: "bold #004461", # class: 'kd'
34 | Keyword.Namespace: "bold #004461", # class: 'kn'
35 | Keyword.Pseudo: "bold #004461", # class: 'kp'
36 | Keyword.Reserved: "bold #004461", # class: 'kr'
37 | Keyword.Type: "bold #004461", # class: 'kt'
38 | Operator: "#582800", # class: 'o'
39 | Operator.Word: "bold #004461", # class: 'ow' - like keywords
40 | Punctuation: "bold #000000", # class: 'p'
41 | # because special names such as Name.Class, Name.Function, etc.
42 | # are not recognized as such later in the parsing, we choose them
43 | # to look the same as ordinary variables.
44 | Name: "#000000", # class: 'n'
45 | Name.Attribute: "#c4a000", # class: 'na' - to be revised
46 | Name.Builtin: "#004461", # class: 'nb'
47 | Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
48 | Name.Class: "#000000", # class: 'nc' - to be revised
49 | Name.Constant: "#000000", # class: 'no' - to be revised
50 | Name.Decorator: "#888", # class: 'nd' - to be revised
51 | Name.Entity: "#ce5c00", # class: 'ni'
52 | Name.Exception: "bold #cc0000", # class: 'ne'
53 | Name.Function: "#000000", # class: 'nf'
54 | Name.Property: "#000000", # class: 'py'
55 | Name.Label: "#f57900", # class: 'nl'
56 | Name.Namespace: "#000000", # class: 'nn' - to be revised
57 | Name.Other: "#000000", # class: 'nx'
58 | Name.Tag: "bold #004461", # class: 'nt' - like a keyword
59 | Name.Variable: "#000000", # class: 'nv' - to be revised
60 | Name.Variable.Class: "#000000", # class: 'vc' - to be revised
61 | Name.Variable.Global: "#000000", # class: 'vg' - to be revised
62 | Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
63 | Number: "#990000", # class: 'm'
64 | Literal: "#000000", # class: 'l'
65 | Literal.Date: "#000000", # class: 'ld'
66 | String: "#4e9a06", # class: 's'
67 | String.Backtick: "#4e9a06", # class: 'sb'
68 | String.Char: "#4e9a06", # class: 'sc'
69 | String.Doc: "italic #8f5902", # class: 'sd' - like a comment
70 | String.Double: "#4e9a06", # class: 's2'
71 | String.Escape: "#4e9a06", # class: 'se'
72 | String.Heredoc: "#4e9a06", # class: 'sh'
73 | String.Interpol: "#4e9a06", # class: 'si'
74 | String.Other: "#4e9a06", # class: 'sx'
75 | String.Regex: "#4e9a06", # class: 'sr'
76 | String.Single: "#4e9a06", # class: 's1'
77 | String.Symbol: "#4e9a06", # class: 'ss'
78 | Generic: "#000000", # class: 'g'
79 | Generic.Deleted: "#a40000", # class: 'gd'
80 | Generic.Emph: "italic #000000", # class: 'ge'
81 | Generic.Error: "#ef2929", # class: 'gr'
82 | Generic.Heading: "bold #000080", # class: 'gh'
83 | Generic.Inserted: "#00A000", # class: 'gi'
84 | Generic.Output: "#888", # class: 'go'
85 | Generic.Prompt: "#745334", # class: 'gp'
86 | Generic.Strong: "bold #000000", # class: 'gs'
87 | Generic.Subheading: "bold #800080", # class: 'gu'
88 | Generic.Traceback: "bold #a40000", # class: 'gt'
89 | }
90 |
--------------------------------------------------------------------------------
/doc/_themes/flask_small/static/flasky.css_t:
--------------------------------------------------------------------------------
1 | /*
2 | * flasky.css_t
3 | * ~~~~~~~~~~~~
4 | *
5 | * Sphinx stylesheet -- flasky theme based on nature theme.
6 | *
7 | * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 |
12 | @import url("basic.css");
13 |
14 | /* -- page layout ----------------------------------------------------------- */
15 |
16 | body {
17 | font-family: 'Georgia', serif;
18 | font-size: 17px;
19 | color: #000;
20 | background: white;
21 | margin: 0;
22 | padding: 0;
23 | }
24 |
25 | div.documentwrapper {
26 | float: left;
27 | width: 100%;
28 | }
29 |
30 | div.bodywrapper {
31 | margin: 40px auto 0 auto;
32 | width: 700px;
33 | }
34 |
35 | hr {
36 | border: 1px solid #B1B4B6;
37 | }
38 |
39 | div.body {
40 | background-color: #ffffff;
41 | color: #3E4349;
42 | padding: 0 30px 30px 30px;
43 | }
44 |
45 | img.floatingflask {
46 | padding: 0 0 10px 10px;
47 | float: right;
48 | }
49 |
50 | div.footer {
51 | text-align: right;
52 | color: #888;
53 | padding: 10px;
54 | font-size: 14px;
55 | width: 650px;
56 | margin: 0 auto 40px auto;
57 | }
58 |
59 | div.footer a {
60 | color: #888;
61 | text-decoration: underline;
62 | }
63 |
64 | div.related {
65 | line-height: 32px;
66 | color: #888;
67 | }
68 |
69 | div.related ul {
70 | padding: 0 0 0 10px;
71 | }
72 |
73 | div.related a {
74 | color: #444;
75 | }
76 |
77 | /* -- body styles ----------------------------------------------------------- */
78 |
79 | a {
80 | color: #004B6B;
81 | text-decoration: underline;
82 | }
83 |
84 | a:hover {
85 | color: #6D4100;
86 | text-decoration: underline;
87 | }
88 |
89 | div.body {
90 | padding-bottom: 40px; /* saved for footer */
91 | }
92 |
93 | div.body h1,
94 | div.body h2,
95 | div.body h3,
96 | div.body h4,
97 | div.body h5,
98 | div.body h6 {
99 | font-family: 'Garamond', 'Georgia', serif;
100 | font-weight: normal;
101 | margin: 30px 0px 10px 0px;
102 | padding: 0;
103 | }
104 |
105 | {% if theme_index_logo %}
106 | /*div.indexwrapper h1 {
107 | text-indent: -999999px;
108 | background: url({{ theme_index_logo }}) no-repeat center center;
109 | height: {{ theme_index_logo_height }};
110 | }*/
111 | {% endif %}
112 |
113 | div.body h2 { font-size: 180%; }
114 | div.body h3 { font-size: 150%; }
115 | div.body h4 { font-size: 130%; }
116 | div.body h5 { font-size: 100%; }
117 | div.body h6 { font-size: 100%; }
118 |
119 | a.headerlink {
120 | color: white;
121 | padding: 0 4px;
122 | text-decoration: none;
123 | }
124 |
125 | a.headerlink:hover {
126 | color: #444;
127 | background: #eaeaea;
128 | }
129 |
130 | div.body p, div.body dd, div.body li {
131 | line-height: 1.4em;
132 | }
133 |
134 | div.admonition {
135 | background: #fafafa;
136 | margin: 20px -30px;
137 | padding: 10px 30px;
138 | border-top: 1px solid #ccc;
139 | border-bottom: 1px solid #ccc;
140 | }
141 |
142 | div.admonition p.admonition-title {
143 | font-family: 'Garamond', 'Georgia', serif;
144 | font-weight: normal;
145 | font-size: 24px;
146 | margin: 0 0 10px 0;
147 | padding: 0;
148 | line-height: 1;
149 | }
150 |
151 | div.admonition p.last {
152 | margin-bottom: 0;
153 | }
154 |
155 | div.highlight{
156 | background-color: white;
157 | }
158 |
159 | dt:target, .highlight {
160 | background: #FAF3E8;
161 | }
162 |
163 | div.note {
164 | background-color: #eee;
165 | border: 1px solid #ccc;
166 | }
167 |
168 | div.seealso {
169 | background-color: #ffc;
170 | border: 1px solid #ff6;
171 | }
172 |
173 | div.topic {
174 | background-color: #eee;
175 | }
176 |
177 | div.warning {
178 | background-color: #ffe4e4;
179 | border: 1px solid #f66;
180 | }
181 |
182 | p.admonition-title {
183 | display: inline;
184 | }
185 |
186 | p.admonition-title:after {
187 | content: ":";
188 | }
189 |
190 | pre, tt {
191 | font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
192 | font-size: 0.85em;
193 | }
194 |
195 | img.screenshot {
196 | }
197 |
198 | tt.descname, tt.descclassname {
199 | font-size: 0.95em;
200 | }
201 |
202 | tt.descname {
203 | padding-right: 0.08em;
204 | }
205 |
206 | img.screenshot {
207 | -moz-box-shadow: 2px 2px 4px #eee;
208 | -webkit-box-shadow: 2px 2px 4px #eee;
209 | box-shadow: 2px 2px 4px #eee;
210 | }
211 |
212 | table.docutils {
213 | border: 1px solid #888;
214 | -moz-box-shadow: 2px 2px 4px #eee;
215 | -webkit-box-shadow: 2px 2px 4px #eee;
216 | box-shadow: 2px 2px 4px #eee;
217 | }
218 |
219 | table.docutils td, table.docutils th {
220 | border: 1px solid #888;
221 | padding: 0.25em 0.7em;
222 | }
223 |
224 | table.field-list, table.footnote {
225 | border: none;
226 | -moz-box-shadow: none;
227 | -webkit-box-shadow: none;
228 | box-shadow: none;
229 | }
230 |
231 | table.footnote {
232 | margin: 15px 0;
233 | width: 100%;
234 | border: 1px solid #eee;
235 | }
236 |
237 | table.field-list th {
238 | padding: 0 0.8em 0 0;
239 | }
240 |
241 | table.field-list td {
242 | padding: 0;
243 | }
244 |
245 | table.footnote td {
246 | padding: 0.5em;
247 | }
248 |
249 | dl {
250 | margin: 0;
251 | padding: 0;
252 | }
253 |
254 | dl dd {
255 | margin-left: 30px;
256 | }
257 |
258 | pre {
259 | padding: 0;
260 | margin: 15px -30px;
261 | padding: 8px;
262 | line-height: 1.3em;
263 | padding: 7px 30px;
264 | background: #eee;
265 | border-radius: 2px;
266 | -moz-border-radius: 2px;
267 | -webkit-border-radius: 2px;
268 | }
269 |
270 | dl pre {
271 | margin-left: -60px;
272 | padding-left: 60px;
273 | }
274 |
275 | tt {
276 | background-color: #ecf0f3;
277 | color: #222;
278 | /* padding: 1px 2px; */
279 | }
280 |
281 | tt.xref, a tt {
282 | background-color: #FBFBFB;
283 | }
284 |
285 | a:hover tt {
286 | background: #EEE;
287 | }
288 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
18 |
19 | help:
20 | 	@echo "Please use \`make <target>' where <target> is one of"
21 | @echo " html to make standalone HTML files"
22 | @echo " dirhtml to make HTML files named index.html in directories"
23 | @echo " singlehtml to make a single large HTML file"
24 | @echo " pickle to make pickle files"
25 | @echo " json to make JSON files"
26 | @echo " htmlhelp to make HTML files and a HTML help project"
27 | @echo " qthelp to make HTML files and a qthelp project"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
31 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
32 | @echo " text to make text files"
33 | @echo " man to make manual pages"
34 | @echo " texinfo to make Texinfo files"
35 | @echo " info to make Texinfo files and run them through makeinfo"
36 | @echo " gettext to make PO message catalogs"
37 | @echo " changes to make an overview of all changed/added/deprecated items"
38 | @echo " linkcheck to check all external links for integrity"
39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
40 |
41 | clean:
42 | -rm -rf $(BUILDDIR)/*
43 |
44 | html:
45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
48 |
49 | dirhtml:
50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
51 | @echo
52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
53 |
54 | singlehtml:
55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
56 | @echo
57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
58 |
59 | pickle:
60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
61 | @echo
62 | @echo "Build finished; now you can process the pickle files."
63 |
64 | json:
65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
66 | @echo
67 | @echo "Build finished; now you can process the JSON files."
68 |
69 | htmlhelp:
70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
71 | @echo
72 | @echo "Build finished; now you can run HTML Help Workshop with the" \
73 | ".hhp project file in $(BUILDDIR)/htmlhelp."
74 |
75 | qthelp:
76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
77 | @echo
78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
80 | 	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ckanserviceprovider.qhcp"
81 | @echo "To view the help file:"
82 | 	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ckanserviceprovider.qhc"
83 |
84 | devhelp:
85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
86 | @echo
87 | @echo "Build finished."
88 | @echo "To view the help file:"
89 | 	@echo "# mkdir -p $$HOME/.local/share/devhelp/ckanserviceprovider"
90 | 	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ckanserviceprovider"
91 | @echo "# devhelp"
92 |
93 | epub:
94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
95 | @echo
96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
97 |
98 | latex:
99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
100 | @echo
101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
103 | "(use \`make latexpdf' here to do that automatically)."
104 |
105 | latexpdf:
106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
107 | @echo "Running LaTeX files through pdflatex..."
108 | make -C $(BUILDDIR)/latex all-pdf
109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
110 |
111 | text:
112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
113 | @echo
114 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
115 |
116 | man:
117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
118 | @echo
119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
120 |
121 | texinfo:
122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
123 | @echo
124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
125 | @echo "Run \`make' in that directory to run these through makeinfo" \
126 | "(use \`make info' here to do that automatically)."
127 |
128 | info:
129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
130 | @echo "Running Texinfo files through makeinfo..."
131 | make -C $(BUILDDIR)/texinfo info
132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
133 |
134 | gettext:
135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
136 | @echo
137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
138 |
139 | changes:
140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
141 | @echo
142 | @echo "The overview file is in $(BUILDDIR)/changes."
143 |
144 | linkcheck:
145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
146 | @echo
147 | @echo "Link check complete; look for any errors in the above output " \
148 | "or in $(BUILDDIR)/linkcheck/output.txt."
149 |
150 | doctest:
151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
152 | @echo "Testing of doctests in the sources finished, look at the " \
153 | "results in $(BUILDDIR)/doctest/output.txt."
154 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # ckanserviceprovider documentation build configuration file, created by
4 | # sphinx-quickstart on Sun Aug 14 17:09:50 2011.
5 | #
6 | # This file is execfile()d with the current directory set to its containing dir.
7 | #
8 | # Note that not all possible configuration values are present in this
9 | # autogenerated file.
10 | #
11 | # All configuration values have a default; values that are commented out
12 | # serve to show the default.
13 |
14 | import sys
15 | import os
16 |
17 | # If extensions (or modules to document with autodoc) are in another directory,
18 | # add these directories to sys.path here. If the directory is relative to the
19 | # documentation root, use os.path.abspath to make it absolute, like shown here.
20 | # sys.path.insert(0, os.path.abspath('.'))
21 |
22 | # -- General configuration -----------------------------------------------------
23 |
24 | # If your documentation needs a minimal Sphinx version, state it here.
25 | # needs_sphinx = '1.0'
26 |
27 | # Add any Sphinx extension module names here, as strings. They can be extensions
28 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
29 | extensions = ["sphinx.ext.autodoc", "sphinxcontrib.autohttp.flask"]
30 |
31 | # Add any paths that contain templates here, relative to this directory.
32 | templates_path = ["_templates"]
33 |
34 | # The suffix of source filenames.
35 | source_suffix = ".rst"
36 |
37 | # The encoding of source files.
38 | # source_encoding = 'utf-8-sig'
39 |
40 | # The master toctree document.
41 | master_doc = "index"
42 |
43 | # General information about the project.
44 | project = "ckanserviceprovider"
45 | copyright = "2013, Open Knowledge Foundation"
46 |
47 | # The version info for the project you're documenting, acts as replacement for
48 | # |version| and |release|, also used in various other places throughout the
49 | # built documents.
50 | #
51 | # The short X.Y version.
52 | version = "0.1"
53 | # The full version, including alpha/beta/rc tags.
54 | release = "0.1"
55 |
56 | # The language for content autogenerated by Sphinx. Refer to documentation
57 | # for a list of supported languages.
58 | # language = None
59 |
60 | # There are two options for replacing |today|: either, you set today to some
61 | # non-false value, then it is used:
62 | # today = ''
63 | # Else, today_fmt is used as the format for a strftime call.
64 | # today_fmt = '%B %d, %Y'
65 |
66 | # List of patterns, relative to source directory, that match files and
67 | # directories to ignore when looking for source files.
68 | exclude_patterns = ["_build"]
69 |
70 | # The reST default role (used for this markup: `text`) to use for all documents.
71 | # default_role = None
72 |
73 | # If true, '()' will be appended to :func: etc. cross-reference text.
74 | # add_function_parentheses = True
75 |
76 | # If true, the current module name will be prepended to all description
77 | # unit titles (such as .. function::).
78 | # add_module_names = True
79 |
80 | # If true, sectionauthor and moduleauthor directives will be shown in the
81 | # output. They are ignored by default.
82 | # show_authors = False
83 |
84 | # The name of the Pygments (syntax highlighting) style to use.
85 | pygments_style = "sphinx"
86 |
87 | # A list of ignored prefixes for module index sorting.
88 | # modindex_common_prefix = []
89 |
90 | # -- Options for HTML output ---------------------------------------------------
91 |
92 | # The theme to use for HTML and HTML Help pages. See the documentation for
93 | # a list of builtin themes.
94 | sys.path.append(os.path.abspath("_themes"))
95 | html_theme_path = ["_themes"]
96 | html_theme = "flask_small"
97 |
98 | # Theme options are theme-specific and customize the look and feel of a theme
99 | # further. For a list of options available for each theme, see the
100 | # documentation.
101 | # html_theme_options = {}
102 |
103 | # Add any paths that contain custom themes here, relative to this directory.
104 | # html_theme_path = []
105 |
106 | # The name for this set of Sphinx documents. If None, it defaults to
107 | # " v documentation".
108 | # html_title = None
109 |
110 | # A shorter title for the navigation bar. Default is the same as html_title.
111 | # html_short_title = None
112 |
113 | # The name of an image file (relative to this directory) to place at the top
114 | # of the sidebar.
115 | # html_logo = None
116 |
117 | # The name of an image file (within the static path) to use as favicon of the
118 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
119 | # pixels large.
120 | # html_favicon = None
121 |
122 | # Add any paths that contain custom static files (such as style sheets) here,
123 | # relative to this directory. They are copied after the builtin static files,
124 | # so a file named "default.css" will overwrite the builtin "default.css".
125 | html_static_path = ["_static"]
126 |
127 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
128 | # using the given strftime format.
129 | # html_last_updated_fmt = '%b %d, %Y'
130 |
131 | # If true, SmartyPants will be used to convert quotes and dashes to
132 | # typographically correct entities.
133 | # html_use_smartypants = True
134 |
135 | # Custom sidebar templates, maps document names to template names.
136 | # html_sidebars = {}
137 |
138 | # Additional templates that should be rendered to pages, maps page names to
139 | # template names.
140 | # html_additional_pages = {}
141 |
142 | # If false, no module index is generated.
143 | # html_domain_indices = True
144 |
145 | # If false, no index is generated.
146 | # html_use_index = True
147 |
148 | # If true, the index is split into individual pages for each letter.
149 | # html_split_index = False
150 |
151 | # If true, links to the reST sources are added to the pages.
152 | # html_show_sourcelink = True
153 |
154 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
155 | # html_show_sphinx = True
156 |
157 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
158 | # html_show_copyright = True
159 |
160 | # If true, an OpenSearch description file will be output, and all pages will
161 | # contain a <link> tag referring to it. The value of this option must be the
162 | # base URL from which the finished HTML is served.
163 | # html_use_opensearch = ''
164 |
165 | # This is the file name suffix for HTML files (e.g. ".xhtml").
166 | # html_file_suffix = None
167 |
168 | # Output file base name for HTML help builder.
169 | htmlhelp_basename = "ckanservicedoc"
170 |
171 |
172 | # -- Options for LaTeX output --------------------------------------------------
173 |
174 | latex_elements = {
175 | # The paper size ('letterpaper' or 'a4paper').
176 | #'papersize': 'letterpaper',
177 | # The font size ('10pt', '11pt' or '12pt').
178 | #'pointsize': '10pt',
179 | # Additional stuff for the LaTeX preamble.
180 | #'preamble': '',
181 | }
182 |
183 | # Grouping the document tree into LaTeX files. List of tuples
184 | # (source start file, target name, title, author, documentclass [howto/manual]).
185 | latex_documents = [
186 | (
187 | "index",
188 | "ckanservice.tex",
189 | "CKAN Service Provider Documentation",
190 | "Open Knowledge Foundation",
191 | "manual",
192 | ),
193 | ]
194 |
195 | # The name of an image file (relative to this directory) to place at the top of
196 | # the title page.
197 | # latex_logo = None
198 |
199 | # For "manual" documents, if this is true, then toplevel headings are parts,
200 | # not chapters.
201 | # latex_use_parts = False
202 |
203 | # If true, show page references after internal links.
204 | # latex_show_pagerefs = False
205 |
206 | # If true, show URL addresses after external links.
207 | # latex_show_urls = False
208 |
209 | # Documents to append as an appendix to all manuals.
210 | # latex_appendices = []
211 |
212 | # If false, no module index is generated.
213 | # latex_domain_indices = True
214 |
215 |
216 | # -- Options for manual page output --------------------------------------------
217 |
218 | # One entry per manual page. List of tuples
219 | # (source start file, name, description, authors, manual section).
220 | man_pages = [
221 | (
222 | "index",
223 | "ckanservice",
224 | "CKAN Service Provider Documentation",
225 | ["Open Knowledge Foundation"],
226 | 1,
227 | )
228 | ]
229 |
230 | # If true, show URL addresses after external links.
231 | # man_show_urls = False
232 |
233 |
234 | # -- Options for Texinfo output ------------------------------------------------
235 |
236 | # Grouping the document tree into Texinfo files. List of tuples
237 | # (source start file, target name, title, author,
238 | # dir menu entry, description, category)
239 | texinfo_documents = [
240 | (
241 | "index",
242 | "ckanservice",
243 | "CKAN Service Provider Documentation",
244 | "Open Knowledge Foundation",
245 |         "ckanserviceprovider",
246 |         "A library for making web services that make functions available as synchronous or asynchronous jobs.",
247 | "Miscellaneous",
248 | ),
249 | ]
250 |
251 | # Documents to append as an appendix to all manuals.
252 | # texinfo_appendices = []
253 |
254 | # If false, no module index is generated.
255 | # texinfo_domain_indices = True
256 |
257 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
258 | # texinfo_show_urls = 'footnote'
259 |
--------------------------------------------------------------------------------
/ckanserviceprovider/db.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """A module that encapsulates CKAN Service Provider's model/database.
3 |
4 | This module provides a set of public functions that other modules should call
5 | to interact with the database, rather than using sqlalchemy directly.
6 |
7 | TODO: Some more refactoring is still needed to have the model completely
8 | encapsulated in this module, other modules are still using sqlalchemy directly
9 | in some places.
10 |
11 | The database contains:
12 |
13 | 1. A jobs table that tracks the statuses of pending and completed jobs
14 | 2. A metadata table that contains (key, value) metadata pairs associated with
15 | jobs
16 | 3. A logs table that contains log output from jobs.
17 |
18 | This module's public interface includes:
19 |
20 | init()
21 | Initialise the database connection on application or test startup, creating the
22 | database and tables if necessary.
23 |
24 | drop_all()
25 | Delete all the database tables, for tests.
26 |
27 | get_job()
28 | Get a dictionary representation of a job from the database.
29 |
30 | add_pending_job()
31 | Add a new job with status "pending" to the database.
32 | Jobs always have status "pending" when they're first added.
33 |
34 | mark_job_as_*()
35 | A set of functions for updating an existing job in the database.
36 | There are a limited number of ways in which CKAN Service Provider updates jobs.
37 | For example marking a job as completed successfully, or marking a job as
38 | failed with an error. These mark_job_as_*() functions define all the ways that
39 | a job can be updated.
40 |
41 | See the functions' own docstrings for details.
42 |
43 | """
44 |
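# A rough usage sketch of this module's public interface (illustrative only;
# the database URI and the values below are placeholders, not project defaults):
#
#     import ckanserviceprovider.db as db
#
#     db.init("sqlite:///jobs.db")
#     db.add_pending_job("job-1", "job-key", "echo", api_key=None,
#                        data={"data": "hello"}, metadata={"source": "docs"})
#     db.mark_job_as_completed("job-1", data=">hello")
#     job_dict = db.get_job("job-1")  # includes "metadata" and "logs" keys
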
45 | import datetime
46 | import json
47 |
48 | import sqlalchemy
49 |
50 |
51 | # Some module-global constants. Some of these are accessed directly by other
52 | # modules. It would be good to factor these out.
53 | ENGINE = None
54 | _METADATA = None
55 | JOBS_TABLE = None
56 | METADATA_TABLE = None
57 | LOGS_TABLE = None
58 |
59 |
60 | def init(uri, echo=False):
61 | """Initialise the database.
62 |
63 | Initialise the sqlalchemy engine, metadata and table objects that we use to
64 | connect to the database.
65 |
66 | Create the database and the database tables themselves if they don't
67 | already exist.
68 |
69 | :param uri: the sqlalchemy database URI
70 | :type uri: string
71 |
72 | :param echo: whether or not to have the sqlalchemy engine log all
73 | statements to stdout
74 | :type echo: bool
75 |
76 | """
77 | global ENGINE, _METADATA, JOBS_TABLE, METADATA_TABLE, LOGS_TABLE
78 | ENGINE = sqlalchemy.create_engine(uri, echo=echo, convert_unicode=True)
79 | _METADATA = sqlalchemy.MetaData(ENGINE)
80 | JOBS_TABLE = _init_jobs_table()
81 | METADATA_TABLE = _init_metadata_table()
82 | LOGS_TABLE = _init_logs_table()
83 | _METADATA.create_all(ENGINE)
84 |
85 |
86 | def drop_all():
87 | """Delete all the database tables (if they exist).
88 |
89 | This is for tests to reset the DB. Note that this will delete *all* tables
90 | in the database, not just tables created by this module (for example
91 | apscheduler's tables will also be deleted).
92 |
93 | """
94 | if _METADATA:
95 | _METADATA.drop_all(ENGINE)
96 |
97 |
98 | def get_job(job_id, limit=None, use_aps_id=False):
99 | """Return the job with the given job_id as a dict.
100 |
101 | The dict also includes any metadata or logs associated with the job.
102 |
103 | Returns None instead of a dict if there's no job with the given job_id.
104 |
105 | The keys of a job dict are:
106 |
107 | "job_id": The unique identifier for the job (unicode)
108 |
109 | "job_type": The name of the job function that will be executed for this
110 | job (unicode)
111 |
112 | "status": The current status of the job, e.g. "pending", "complete", or
113 | "error" (unicode)
114 |
115 | "data": Any output data returned by the job if it has completed
116 | successfully. This may be any JSON-serializable type, e.g. None, a
117 | string, a dict, etc.
118 |
119 | "error": If the job failed with an error this will be a dict with a
120 | "message" key whose value is a string error message. The dict may also
121 | have other keys specific to the particular type of error. If the job
122 | did not fail with an error then "error" will be None.
123 |
124 | "requested_timestamp": The time at which the job was requested (string)
125 |
126 | "finished_timestamp": The time at which the job finished (string)
127 |
128 | "sent_data": The input data for the job, provided by the client site.
129 | This may be any JSON-serializable type, e.g. None, a string, a dict,
130 | etc.
131 |
132 | "result_url": The callback URL that CKAN Service Provider will post the
133 | result to when the job finishes (unicode)
134 |
135 | "api_key": The API key that CKAN Service Provider will use when posting
136 | the job result to the result_url (unicode or None). A None here doesn't
137 | mean that there was no API key: CKAN Service Provider deletes the API
138 | key from the database after it has posted the result to the result_url.
139 |
140 | "job_key": The key that users must provide (in the Authorization header of
141 | the HTTP request) to be authorized to modify the job (unicode).
142 | For example requests to the CKAN Service Provider API need this to get
143 | the status or output data of a job or to delete a job.
144 | If you login to CKAN Service Provider as an administrator then you can
145 | administer any job without providing its job_key.
146 |
147 | "metadata": Any custom metadata associated with the job (dict)
148 |
149 | "logs": Any logs associated with the job (list)
150 |
151 | """
152 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
153 | # warnings.
154 | if job_id:
155 | job_id = str(job_id)
156 |
157 | if use_aps_id:
158 | result = ENGINE.execute(
159 | JOBS_TABLE.select().where(JOBS_TABLE.c.aps_job_id == job_id)
160 | ).first()
161 | else:
162 | result = ENGINE.execute(
163 | JOBS_TABLE.select().where(JOBS_TABLE.c.job_id == job_id)
164 | ).first()
165 |
166 | if not result:
167 | return None
168 |
169 | # Turn the result into a dictionary representation of the job.
170 | result_dict = {}
171 | for field in list(result.keys()):
172 | value = getattr(result, field)
173 | if value is None:
174 | result_dict[field] = value
175 | elif field in ("sent_data", "data", "error"):
176 | result_dict[field] = json.loads(value)
177 | elif isinstance(value, datetime.datetime):
178 | result_dict[field] = value.isoformat()
179 | else:
180 | result_dict[field] = str(value)
181 |
182 | result_dict["metadata"] = _get_metadata(job_id)
183 | result_dict["logs"] = _get_logs(job_id, limit=limit)
184 |
185 | return result_dict
186 |
187 |
188 | def add_pending_job(
189 | job_id, job_key, job_type, api_key, data=None, metadata=None, result_url=None
190 | ):
191 | """Add a new job with status "pending" to the jobs table.
192 |
193 | All code that adds jobs to the jobs table should go through this function.
194 | Code that adds to the jobs table manually should be refactored to use this
195 | function.
196 |
197 | May raise unspecified exceptions from Python core, SQLAlchemy or JSON!
198 | TODO: Document and unit test these!
199 |
200 | :param job_id: a unique identifier for the job, used as the primary key in
201 | ckanserviceprovider's "jobs" database table
202 | :type job_id: unicode
203 |
204 | :param job_key: the key required to administer the job via the API
205 | :type job_key: unicode
206 |
207 | :param job_type: the name of the job function that will be executed for
208 | this job
209 |     :type job_type: unicode
210 |
211 | :param api_key: the client site API key that ckanserviceprovider will use
212 | when posting the job result to the result_url
213 | :type api_key: unicode
214 |
215 | :param data: The input data for the job (called sent_data elsewhere)
216 | :type data: Any JSON-serializable type
217 |
218 | :param metadata: A dict of arbitrary (key, value) metadata pairs to be
219 | stored along with the job. The keys should be strings, the values can
220 | be strings or any JSON-encodable type.
221 | :type metadata: dict
222 |
223 | :param result_url: the callback URL that ckanserviceprovider will post the
224 | job result to when the job has finished
225 | :type result_url: unicode
226 |
227 | """
228 | if not data:
229 | data = {}
230 | data = json.dumps(data)
231 |
232 | # Turn strings into unicode to stop SQLAlchemy
233 | # "Unicode type received non-unicode bind param value" warnings.
234 | if job_id:
235 | job_id = str(job_id)
236 | if job_type:
237 | job_type = str(job_type)
238 | if result_url:
239 | result_url = str(result_url)
240 | if api_key:
241 | api_key = str(api_key)
242 | if job_key:
243 | job_key = str(job_key)
244 | data = str(data)
245 |
246 | if not metadata:
247 | metadata = {}
248 |
249 | conn = ENGINE.connect()
250 | trans = conn.begin()
251 | try:
252 | conn.execute(
253 | JOBS_TABLE.insert().values(
254 | job_id=job_id,
255 | job_type=job_type,
256 | status="pending",
257 | requested_timestamp=datetime.datetime.now(),
258 | sent_data=data,
259 | result_url=result_url,
260 | api_key=api_key,
261 | job_key=job_key,
262 | )
263 | )
264 |
265 | # Insert any (key, value) metadata pairs that the job has into the
266 | # metadata table.
267 | inserts = []
268 | for key, value in list(metadata.items()):
269 | type_ = "string"
270 | if not isinstance(value, str):
271 | value = json.dumps(value)
272 | type_ = "json"
273 |
274 | # Turn strings into unicode to stop SQLAlchemy
275 | # "Unicode type received non-unicode bind param value" warnings.
276 | key = str(key)
277 | value = str(value)
278 |
279 | inserts.append(
280 | {"job_id": job_id, "key": key, "value": value, "type": type_}
281 | )
282 | if inserts:
283 | conn.execute(METADATA_TABLE.insert(), inserts)
284 | trans.commit()
285 | except Exception:
286 | trans.rollback()
287 | raise
288 | finally:
289 | conn.close()
290 |
291 |
292 | class InvalidErrorObjectError(Exception):
293 | pass
294 |
295 |
296 | def _validate_error(error):
297 | """Validate and return the given error object.
298 |
299 | Based on the given error object, return either None or a dict with a
300 | "message" key whose value is a string (the dict may also have any other
301 | keys that it wants).
302 |
303 | The given "error" object can be:
304 |
305 | - None, in which case None is returned
306 |
307 | - A string, in which case a dict like this will be returned:
308 | {"message": error_string}
309 |
310 | - A dict with a "message" key whose value is a string, in which case the
311 | dict will be returned unchanged
312 |
313 | :param error: the error object to validate
314 |
315 | :raises InvalidErrorObjectError: If the error object doesn't match any of
316 | the allowed types
317 |
318 | """
319 | if error is None:
320 | return None
321 | elif isinstance(error, str):
322 | return {"message": error}
323 | else:
324 | try:
325 | message = error["message"]
326 | if isinstance(message, str):
327 | return error
328 | else:
329 | raise InvalidErrorObjectError("error['message'] must be a string")
330 | except (TypeError, KeyError):
331 | raise InvalidErrorObjectError(
332 | "error must be either a string or a dict with a message key"
333 | )
334 |
335 |
336 | def _update_job(job_id, job_dict):
337 | """Update the database row for the given job_id with the given job_dict.
338 |
339 | All functions that update rows in the jobs table do it by calling this
340 | helper function.
341 |
342 | job_dict is a dict with values corresponding to the database columns that
343 | should be updated, e.g.:
344 |
345 | {"status": "complete", "data": ...}
346 |
347 | """
348 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
349 | # warnings.
350 | if job_id:
351 | job_id = str(job_id)
352 |
353 | if "error" in job_dict:
354 | job_dict["error"] = _validate_error(job_dict["error"])
355 | job_dict["error"] = json.dumps(job_dict["error"])
356 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
357 | # warnings.
358 | job_dict["error"] = str(job_dict["error"])
359 |
360 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
361 | # warnings.
362 | if "data" in job_dict:
363 | job_dict["data"] = str(job_dict["data"])
364 |
365 | ENGINE.execute(
366 | JOBS_TABLE.update().where(JOBS_TABLE.c.job_id == job_id).values(**job_dict)
367 | )
368 |
369 |
370 | def mark_job_as_completed(job_id, data=None):
371 | """Mark a job as completed successfully.
372 |
373 | :param job_id: the job_id of the job to be updated
374 | :type job_id: unicode
375 |
376 | :param data: the output data returned by the job
377 | :type data: any JSON-serializable type (including None)
378 |
379 | """
380 | update_dict = {
381 | "status": "complete",
382 | "data": json.dumps(data),
383 | "finished_timestamp": datetime.datetime.now(),
384 | }
385 | _update_job(job_id, update_dict)
386 |
387 |
388 | def mark_job_as_missed(job_id):
389 | """Mark a job as missed because it was in the queue for too long.
390 |
391 | :param job_id: the job_id of the job to be updated
392 | :type job_id: unicode
393 |
394 | """
395 | update_dict = {
396 | "status": "error",
397 | "error": "Job delayed too long, service full",
398 | "finished_timestamp": datetime.datetime.now(),
399 | }
400 | _update_job(job_id, update_dict)
401 |
402 |
403 | def mark_job_as_errored(job_id, error_object):
404 | """Mark a job as failed with an error.
405 |
406 | :param job_id: the job_id of the job to be updated
407 | :type job_id: unicode
408 |
409 | :param error_object: the error returned by the job
410 | :type error_object: either a string or a dict with a "message" key whose
411 | value is a string
412 |
413 | """
414 | update_dict = {
415 | "status": "error",
416 | "error": error_object,
417 | "finished_timestamp": datetime.datetime.now(),
418 | }
419 | _update_job(job_id, update_dict)
420 |
421 |
422 | def mark_job_as_failed_to_post_result(job_id):
423 | """Mark a job as 'failed to post result'.
424 |
425 | This happens when a job completes (either successfully or with an error)
426 |     but posting the job result back to the job's callback URL fails.
427 |
428 | FIXME: This overwrites any error from the job itself!
429 |
430 | :param job_id: the job_id of the job to be updated
431 | :type job_id: unicode
432 |
433 | """
434 | update_dict = {
435 | "error": "Process completed but unable to post to result_url",
436 | }
437 | _update_job(job_id, update_dict)
438 |
439 |
440 | def delete_api_key(job_id):
441 | """Delete the given job's API key from the database.
442 |
443 | The API key is used when posting the job's result to the client's callback
444 | URL. This function should be called to delete the API key after the result
445 | has been posted - the API key is no longer needed.
446 |
447 | """
448 | _update_job(job_id, {"api_key": None})
449 |
450 |
451 | def set_aps_job_id(job_id, aps_job_id):
452 |     """Store the APScheduler job id for the given job."""
453 |     _update_job(job_id, {"aps_job_id": aps_job_id})
454 |
455 |
456 | def _init_jobs_table():
457 | """Initialise the "jobs" table in the db."""
458 | _jobs_table = sqlalchemy.Table(
459 | "jobs",
460 | _METADATA,
461 | sqlalchemy.Column("job_id", sqlalchemy.UnicodeText, primary_key=True),
462 | sqlalchemy.Column("job_type", sqlalchemy.UnicodeText),
463 | sqlalchemy.Column("status", sqlalchemy.UnicodeText, index=True),
464 | sqlalchemy.Column("data", sqlalchemy.UnicodeText),
465 | sqlalchemy.Column("error", sqlalchemy.UnicodeText),
466 | sqlalchemy.Column("requested_timestamp", sqlalchemy.DateTime),
467 | sqlalchemy.Column("finished_timestamp", sqlalchemy.DateTime),
468 | sqlalchemy.Column("sent_data", sqlalchemy.UnicodeText),
469 | sqlalchemy.Column("aps_job_id", sqlalchemy.UnicodeText),
470 | # Callback URL:
471 | sqlalchemy.Column("result_url", sqlalchemy.UnicodeText),
472 | # CKAN API key:
473 | sqlalchemy.Column("api_key", sqlalchemy.UnicodeText),
474 | # Key to administer job:
475 | sqlalchemy.Column("job_key", sqlalchemy.UnicodeText),
476 | )
477 | return _jobs_table
478 |
479 |
480 | def _init_metadata_table():
481 | """Initialise the "metadata" table in the db."""
482 | _metadata_table = sqlalchemy.Table(
483 | "metadata",
484 | _METADATA,
485 | sqlalchemy.Column(
486 | "job_id",
487 | sqlalchemy.ForeignKey("jobs.job_id", ondelete="CASCADE"),
488 | nullable=False,
489 | primary_key=True,
490 | ),
491 | sqlalchemy.Column("key", sqlalchemy.UnicodeText, primary_key=True),
492 | sqlalchemy.Column("value", sqlalchemy.UnicodeText, index=True),
493 | sqlalchemy.Column("type", sqlalchemy.UnicodeText),
494 | )
495 | return _metadata_table
496 |
497 |
498 | def _init_logs_table():
499 | """Initialise the "logs" table in the db."""
500 | _logs_table = sqlalchemy.Table(
501 | "logs",
502 | _METADATA,
503 | sqlalchemy.Column(
504 | "job_id",
505 | sqlalchemy.ForeignKey("jobs.job_id", ondelete="CASCADE"),
506 | nullable=False,
507 | ),
508 | sqlalchemy.Column("timestamp", sqlalchemy.DateTime),
509 | sqlalchemy.Column("message", sqlalchemy.UnicodeText),
510 | sqlalchemy.Column("level", sqlalchemy.UnicodeText),
511 | sqlalchemy.Column("module", sqlalchemy.UnicodeText),
512 | sqlalchemy.Column("funcName", sqlalchemy.UnicodeText),
513 | sqlalchemy.Column("lineno", sqlalchemy.Integer),
514 | )
515 | return _logs_table
516 |
517 |
518 | def _get_metadata(job_id):
519 | """Return any metadata for the given job_id from the metadata table."""
520 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
521 | # warnings.
522 | job_id = str(job_id)
523 |
524 | results = ENGINE.execute(
525 | METADATA_TABLE.select().where(METADATA_TABLE.c.job_id == job_id)
526 | ).fetchall()
527 | metadata = {}
528 | for row in results:
529 | value = row["value"]
530 | if row["type"] == "json":
531 | value = json.loads(value)
532 | metadata[row["key"]] = value
533 | return metadata
534 |
535 |
536 | def _get_logs(job_id, limit=None):
537 | """Return any logs for the given job_id from the logs table."""
538 | # Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
539 | # warnings.
540 | job_id = str(job_id)
541 | try:
542 | int(limit)
543 | limit_is_valid = True
544 | except (ValueError, TypeError):
545 | # None or "one" (or similar)
546 | limit_is_valid = False
547 |
548 | if not limit_is_valid:
549 | results = ENGINE.execute(
550 | LOGS_TABLE.select().where(LOGS_TABLE.c.job_id == job_id)
551 | ).fetchall()
552 | else:
553 | results = ENGINE.execute(
554 | LOGS_TABLE.select()
555 | .where(LOGS_TABLE.c.job_id == job_id)
556 | .order_by(LOGS_TABLE.c.timestamp.desc())
557 | .limit(limit)
558 | ).fetchall()
559 |
560 | results = [dict(result) for result in results]
561 |
562 | for result in results:
563 | result.pop("job_id")
564 |
565 | return results
566 |
567 |
568 | def add_logs(job_id, message=None, level=None, module=None, funcName=None, lineno=None):
569 |     """Insert a row into the logs table (helper for tests only)."""
570 | if job_id:
571 | job_id = str(job_id)
572 | if message:
573 | message = str(message)
574 | if level:
575 | level = str(level)
576 | if module:
577 | module = str(module)
578 | if funcName:
579 | funcName = str(funcName)
580 | if lineno:
581 | lineno = str(lineno)
582 | conn = ENGINE.connect()
583 | trans = conn.begin()
584 |
585 | conn.execute(
586 | LOGS_TABLE.insert().values(
587 | job_id=job_id,
588 | timestamp=datetime.datetime.now(),
589 | message=message,
590 | level=level,
591 | module=module,
592 | funcName=funcName,
593 | lineno=lineno,
594 | )
595 | )
596 | trans.commit()
597 | conn.close()
598 |
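Taken together, the helpers above form a small job-lifecycle API. A minimal sketch of how they compose, assuming db.init() has already been called with a database URI (as web.init() does below) and that a job row with the given id already exists; the id and values are placeholders:

    from ckanserviceprovider import db

    job_id = "example-job-id"  # placeholder; normally a UUID chosen when the job is submitted

    db.mark_job_as_completed(job_id, data={"rows": 42})
    db.add_logs(job_id, message="finished", level="INFO")  # test helper only

    logs = db._get_logs(job_id, limit=10)   # newest entries first when a valid limit is given
    meta = db._get_metadata(job_id)         # metadata rows, with JSON values already decoded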
--------------------------------------------------------------------------------
/ckanserviceprovider/web.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 |
4 | import uuid
5 | import datetime
6 | import sys
7 | import json
8 | import traceback
9 | import logging
10 | import logging.handlers
11 |
12 | import flask
13 | import flask_login as flogin
14 | import werkzeug
15 | from apscheduler.schedulers.background import BackgroundScheduler as Scheduler
16 | import apscheduler.events as events
17 | import apscheduler.jobstores.sqlalchemy as sqlalchemy_store
18 | from apscheduler.triggers.date import DateTrigger
19 | import sqlalchemy.sql as sql
20 | import sqlalchemy as sa
21 | import requests
22 | from werkzeug.middleware.proxy_fix import ProxyFix
23 |
24 | from . import db
25 | from . import util
26 | from . import default_settings
27 |
28 | # Some module-global constants. Some of these are accessed directly by other
29 | # modules. It would be good to factor out as many of these as possible.
30 | sync_types = {}
31 | async_types = {}
32 | job_statuses = ["pending", "complete", "error"]
33 | app = flask.Flask(__name__)
34 | scheduler = None
35 | _users = None
36 | _names = None
37 | SSL_VERIFY = None
38 |
39 |
40 | def init():
41 | """Initialise and configure the app, database, scheduler, etc.
42 |
43 | This should be called once at application startup or at tests startup
44 | (and not e.g. called once for each test case).
45 |
46 | """
47 | global _users, _names
48 | _configure_app(app)
49 | _users, _names = _init_login_manager(app)
50 | _configure_logger()
51 | init_scheduler(app.config.get("SQLALCHEMY_DATABASE_URI"))
52 | db.init(app.config.get("SQLALCHEMY_DATABASE_URI"))
53 |
54 |
55 | def _configure_app(app_):
56 | """Configure the Flask WSGI app."""
57 | app_.url_map.strict_slashes = False
58 | app_.config.from_object(default_settings)
59 | app_.config.from_envvar("JOB_CONFIG", silent=True)
60 | db_url = app_.config.get("SQLALCHEMY_DATABASE_URI")
61 | if not db_url:
62 | raise Exception("No db_url in config")
63 | app_.wsgi_app = ProxyFix(app_.wsgi_app)
64 |
65 | global SSL_VERIFY
66 | if app_.config.get("SSL_VERIFY") in ["False", "FALSE", "0", False, 0]:
67 | SSL_VERIFY = False
68 | else:
69 | SSL_VERIFY = True
70 |
71 | return app_
72 |
73 |
74 | def _init_login_manager(app_):
75 | """Initialise and configure the login manager."""
76 | login_manager = flogin.LoginManager()
77 | login_manager.setup_app(app_)
78 | login_manager.anonymous_user = Anonymous
79 | login_manager.login_view = "login"
80 |
81 | users = {app_.config["USERNAME"]: User("Admin", 0)}
82 | names = dict((int(v.get_id()), k) for k, v in list(users.items()))
83 |
84 | @login_manager.user_loader
85 | def load_user(userid):
86 | userid = int(userid)
87 | name = names.get(userid)
88 | return users.get(name)
89 |
90 | return users, names
91 |
92 |
93 | def _configure_logger_for_production(logger):
94 | """Configure the given logger for production deployment.
95 |
96 | Logs to stderr and file, and emails errors to admins.
97 |
98 | """
99 | stderr_handler = logging.StreamHandler(sys.stderr)
100 | stderr_handler.setLevel(logging.INFO)
101 | ts_formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
102 | if "STDERR" in app.config and app.config["STDERR"]:
103 | stderr_handler.setFormatter(ts_formatter)
104 | logger.addHandler(stderr_handler)
105 |
106 | if "LOG_FILE" in app.config and app.config["LOG_FILE"]:
107 | file_handler = logging.handlers.RotatingFileHandler(
108 | app.config.get("LOG_FILE"), maxBytes=67108864, backupCount=5
109 | )
110 | file_handler.setLevel(logging.INFO)
111 | file_handler.setFormatter(ts_formatter)
112 | logger.addHandler(file_handler)
113 |
114 | mail_handler = logging.handlers.SMTPHandler(
115 | "127.0.0.1",
116 | app.config.get("FROM_EMAIL"),
117 | app.config.get("ADMINS", []),
118 | "CKAN Service Error",
119 | )
120 | mail_handler.setLevel(logging.ERROR)
121 | if "FROM_EMAIL" in app.config:
122 | logger.addHandler(mail_handler)
123 |
124 |
125 | def _configure_logger_for_debugging(logger):
126 | """Configure the given logger for debug mode."""
127 | logger.addHandler(app.logger.handlers[0])
128 |
129 |
130 | def _configure_logger():
131 | """Configure the logging module."""
132 | if not app.debug:
133 | _configure_logger_for_production(logging.getLogger())
134 | elif not app.testing:
135 | _configure_logger_for_debugging(logging.getLogger())
136 |
137 |
138 | def init_scheduler(db_uri):
139 | """Initialise and configure the scheduler."""
140 | global scheduler
141 | scheduler = Scheduler()
142 | scheduler.misfire_grace_time = 3600
143 | scheduler.add_jobstore(sqlalchemy_store.SQLAlchemyJobStore(url=db_uri), "default")
144 | scheduler.add_listener(
145 | job_listener,
146 | events.EVENT_JOB_EXECUTED | events.EVENT_JOB_MISSED | events.EVENT_JOB_ERROR,
147 | )
148 | return scheduler
149 |
150 |
151 | class User(flogin.UserMixin):
152 | def __init__(self, name, id, active=True):
153 | self.name = name
154 | self.id = id
155 | self.active = active
156 |
157 | def is_active(self):
158 | return self.active
159 |
160 |
161 | class Anonymous(flogin.AnonymousUserMixin):
162 | name = "Anonymous"
163 |
164 |
165 | class RunNowTrigger(object):
166 |     """Custom apscheduler trigger that runs a job once and
167 |     only once."""
168 |
169 | def __init__(self):
170 | self.run = False
171 |
172 | def get_next_fire_time(self, start_date):
173 | if not self.run:
174 | self.run = True
175 | return datetime.datetime.now()
176 |
177 | def __str__(self):
178 | return "RunTriggerNow, run = %s" % self.run
179 |
180 | def __repr__(self):
181 | return "RunTriggerNow, run = %s" % self.run
182 |
183 |
184 | def job_listener(event):
185 |     """Handle finished scheduler jobs: completed, errored, or missed."""
186 | aps_job_id = event.job_id
187 |
188 | job = db.get_job(aps_job_id, use_aps_id=True)
189 | job_id = job['job_id']
190 |
191 | if event.code == events.EVENT_JOB_MISSED:
192 | db.mark_job_as_missed(job_id)
193 | elif event.exception:
194 | if isinstance(event.exception, util.JobError):
195 | error_object = event.exception.as_dict()
196 | else:
197 | error_object = "\n".join(
198 | [event.traceback] + [repr(event.exception)]
199 | )
200 | db.mark_job_as_errored(job_id, error_object)
201 | else:
202 | db.mark_job_as_completed(job_id, event.retval)
203 | api_key = db.get_job(job_id)["api_key"]
204 | result_ok = send_result(job_id, api_key)
205 |
206 | if not result_ok:
207 | db.mark_job_as_failed_to_post_result(job_id)
208 |
209 | # Optionally notify tests that job_listener() has finished.
210 | if "_TEST_CALLBACK_URL" in app.config:
211 | requests.get(app.config["_TEST_CALLBACK_URL"])
212 |
213 |
214 | headers = {str("Content-Type"): str("application/json")}
215 |
216 |
217 | @app.route("/", methods=["GET"])
218 | def index():
219 | """Show link to documentation.
220 |
221 | :rtype: A dictionary with the following keys
222 | :param help: Help text
223 | :type help: string
224 | """
225 | return flask.jsonify(
226 | help="""
227 | Get help at:
228 | http://ckan-service-provider.readthedocs.org/."""
229 | )
230 |
231 |
232 | @app.route("/status", methods=["GET"])
233 | def status():
234 | """Show version, available job types and name of service.
235 |
236 | **Results:**
237 |
238 | :rtype: A dictionary with the following keys
239 | :param version: Version of the service provider
240 | :type version: float
241 | :param job_types: Available job types
242 | :type job_types: list of strings
243 | :param name: Name of the service
244 | :type name: string
245 | :param stats: Shows stats for jobs in queue
246 | :type stats: dictionary
247 | """
248 | job_types = sorted(list(async_types.keys()) + list(sync_types.keys()))
249 |
250 | counts = {}
251 | for job_status in job_statuses:
252 | counts[job_status] = db.ENGINE.execute(
253 | db.JOBS_TABLE.count().where(db.JOBS_TABLE.c.status == job_status)
254 | ).first()[0]
255 |
256 | return flask.jsonify(
257 | version=0.1,
258 | job_types=job_types,
259 | name=app.config.get("NAME", "example"),
260 | stats=counts,
261 | )
262 |
263 |
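As an illustration, a client can read this endpoint with a plain GET; the host and port below are placeholders, not values taken from this repository:

    import requests

    resp = requests.get("http://localhost:8000/status")
    info = resp.json()
    print(info["version"], info["name"])
    print(info["job_types"])  # names registered via the job decorators
    print(info["stats"])      # per-status job counts (pending/complete/error)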
264 | def check_auth(username, password):
265 | """This function is called to check if a username /
266 | password combination is valid.
267 | """
268 | return username == app.config["USERNAME"] and password == app.config["PASSWORD"]
269 |
270 |
271 | @app.route("/login", methods=["POST", "GET"])
272 | def login():
273 | """Log in as administrator
274 |
275 |     You can use either basic auth or form-based login (via POST).
276 |
277 | :param username: The administrator's username
278 | :type username: string
279 | :param password: The administrator's password
280 | :type password: string
281 | """
282 | username = None
283 | password = None
284 | next = flask.request.args.get("next")
285 | auth = flask.request.authorization
286 |
287 | if flask.request.method == "POST":
288 | username = flask.request.form["username"]
289 | password = flask.request.form["password"]
290 |
291 | if auth and auth.type == "basic":
292 | username = auth.username
293 | password = auth.password
294 |
295 | if not flogin.current_user.is_active:
296 | error = "You have to login with proper credentials"
297 | if username and password:
298 | if check_auth(username, password):
299 | user = _users.get(username)
300 | if user:
301 | if flogin.login_user(user):
302 | return flask.redirect(next or flask.url_for("user"))
303 | error = "Could not log in user."
304 | else:
305 | error = "User not found."
306 | else:
307 | error = "Wrong username or password."
308 | else:
309 | error = "No username or password."
310 | return flask.Response(
311 | "Could not verify your access level for that URL.\n {}".format(error),
312 | 401,
313 | {str("WWW-Authenticate"): str('Basic realm="Login Required"')},
314 | )
315 | return flask.redirect(next or flask.url_for("user"))
316 |
317 |
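A sketch of both login styles; the base URL and credentials are placeholders and must match the USERNAME/PASSWORD in the service configuration:

    import requests

    base = "http://localhost:8000"

    # Form-based login; the session keeps the login cookie for later requests.
    session = requests.Session()
    session.post(base + "/login", data={"username": "admin", "password": "changeme"})
    print(session.get(base + "/user").json())

    # Alternatively, HTTP basic auth against the same endpoint.
    print(requests.get(base + "/login", auth=("admin", "changeme")).status_code)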
318 | @app.route("/user", methods=["GET"])
319 | def user():
320 | """Show information about the current user
321 |
322 | :rtype: A dictionary with the following keys
323 | :param id: User id
324 | :type id: int
325 | :param name: User name
326 | :type name: string
327 | :param is_active: Whether the user is currently active
328 | :type is_active: bool
329 | :param is_anonymous: The anonymous user is the default user if you
330 | are not logged in
331 | :type is_anonymous: bool
332 | """
333 | user = flogin.current_user
334 | return flask.jsonify(
335 | {
336 | "id": user.get_id(),
337 | "name": user.name,
338 | "is_active": user.is_active(),
339 | "is_anonymous": user.is_anonymous,
340 | }
341 | )
342 |
343 |
344 | @app.route("/logout")
345 | def logout():
346 | """Log out the active user"""
347 | flogin.logout_user()
348 | next = flask.request.args.get("next")
349 | return flask.redirect(next or flask.url_for("user"))
350 |
351 |
352 | @app.route("/job", methods=["GET"])
353 | def job_list():
354 | """List all jobs.
355 |
356 | :param _limit: maximum number of jobs to show (default 100)
357 | :type _limit: int
358 |     :param _offset: how many jobs to skip before showing the first one (default 0)
359 | :type _offset: int
360 | :param _status: filter jobs by status (complete, error)
361 | :type _status: string
362 |
363 | Also, you can filter the jobs by their metadata. Use the metadata key
364 | as parameter key and the value as value.
365 |
366 | :rtype: A list of job ids
367 | """
368 | args = dict((key, value) for key, value in list(flask.request.args.items()))
369 | limit = args.pop("_limit", 100)
370 | offset = args.pop("_offset", 0)
371 |
372 | select = (
373 | sql.select(
374 | [db.JOBS_TABLE.c.job_id],
375 | from_obj=[
376 | db.JOBS_TABLE.outerjoin(
377 | db.METADATA_TABLE,
378 | db.JOBS_TABLE.c.job_id == db.METADATA_TABLE.c.job_id,
379 | )
380 | ],
381 | )
382 | .group_by(db.JOBS_TABLE.c.job_id)
383 | .order_by(db.JOBS_TABLE.c.requested_timestamp.desc())
384 | .limit(limit)
385 | .offset(offset)
386 | )
387 |
388 | status = args.pop("_status", None)
389 | if status:
390 | select = select.where(db.JOBS_TABLE.c.status == status)
391 |
392 | ors = []
393 | for key, value in args.items():
394 | # Turn strings into unicode to stop SQLAlchemy
395 | # "Unicode type received non-unicode bind param value" warnings.
396 | key = str(key)
397 |
398 | ors.append(
399 | sql.and_(db.METADATA_TABLE.c.key == key, db.METADATA_TABLE.c.value == value)
400 | )
401 |
402 | if ors:
403 | select = select.where(sql.or_(*ors))
404 | select = select.having(sql.func.count(db.JOBS_TABLE.c.job_id) == len(ors))
405 |
406 | result = db.ENGINE.execute(select)
407 | listing = []
408 | for (job_id,) in result:
409 | listing.append(flask.url_for("job_status", job_id=job_id))
410 |
411 | return flask.jsonify(list=listing)
412 |
413 |
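For instance, to page through completed jobs and narrow the listing by a metadata key (the URL and the "resource_id" key are placeholders):

    import requests

    params = {
        "_status": "complete",
        "_limit": 20,
        "_offset": 0,
        "resource_id": "abc-123",  # any metadata key/value pair filters the listing
    }
    resp = requests.get("http://localhost:8000/job", params=params)
    print(resp.json()["list"])  # relative URLs of the matching jobs, newest first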
414 | class DatetimeJsonEncoder(json.JSONEncoder):
415 |     # Custom JSON encoder that serializes datetime objects as ISO 8601 strings.
416 | def default(self, obj):
417 | if isinstance(obj, datetime.datetime):
418 | return obj.isoformat()
419 |
420 | return json.JSONEncoder.default(self, obj)
421 |
422 |
423 | @app.route("/job/<job_id>", methods=["GET"])
424 | def job_status(job_id, show_job_key=False, ignore_auth=False):
425 | """Show a specific job.
426 |
427 | :param limit: Limit the number of logs
428 | :type limit: integer
429 |
430 | **Results:**
431 |
432 | :rtype: A dictionary with the following keys
433 | :param status: Status of job (complete, error)
434 | :type status: string
435 | :param sent_data: Input data for job
436 | :type sent_data: json encodable data
437 | :param job_id: An identifier for the job
438 | :type job_id: string
439 | :param result_url: Callback url
440 | :type result_url: url string
441 | :param data: Results from job.
442 | :type data: json encodable data
443 | :param error: Error raised during job execution
444 | :type error: string
445 | :param metadata: Metadata provided when submitting job.
446 | :type metadata: list of key - value pairs
447 | :param requested_timestamp: Time the job started
448 | :type requested_timestamp: timestamp
449 | :param finished_timestamp: Time the job finished
450 | :type finished_timestamp: timestamp
451 |
452 | :statuscode 200: no error
453 | :statuscode 403: not authorized to view the job's data
454 | :statuscode 404: job id not found
455 | :statuscode 409: an error occurred
456 | """
457 | limit = flask.request.args.get("limit")
458 | job_dict = db.get_job(job_id, limit=limit)
459 | if not job_dict:
460 | return json.dumps({"error": "job_id not found"}), 404, headers
461 | if not ignore_auth and not is_authorized(job_dict):
462 | return json.dumps({"error": "not authorized"}), 403, headers
463 | job_dict.pop("api_key", None)
464 | if not show_job_key:
465 | job_dict.pop("job_key", None)
466 | return flask.Response(
467 | json.dumps(job_dict, cls=DatetimeJsonEncoder), mimetype="application/json"
468 | )
469 |
470 |
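A sketch of polling a single job with the job_key returned at submission time (all values are placeholders):

    import requests

    job_id = "0f2c9a1e-example"  # placeholder
    job_key = "secret-job-key"   # placeholder; returned by POST /job

    resp = requests.get(
        "http://localhost:8000/job/" + job_id,
        headers={"Authorization": job_key},  # the job_key (or the service SECRET_KEY) grants access
        params={"limit": 50},                # cap the number of log entries included
    )
    job = resp.json()
    print(job["status"], job.get("error"))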
471 | @app.route("/job/<job_id>", methods=["DELETE"])
472 | def job_delete(job_id):
473 | """Deletes the job together with its logs and metadata.
474 |
475 | :param job_id: An identifier for the job
476 | :type job_id: string
477 |
478 | :statuscode 200: no error
479 | :statuscode 403: not authorized to delete the job
480 | :statuscode 404: the job could not be found
481 | :statuscode 409: an error occurred
482 | """
483 | conn = db.ENGINE.connect()
484 | job = db.get_job(job_id)
485 | if not job:
486 | return json.dumps({"error": "job_id not found"}), 404, headers
487 | if not is_authorized(job):
488 | return json.dumps({"error": "not authorized"}), 403, headers
489 | trans = conn.begin()
490 | try:
491 | conn.execute(db.JOBS_TABLE.delete().where(db.JOBS_TABLE.c.job_id == job_id))
492 | trans.commit()
493 | return json.dumps({"success": True}), 200, headers
494 | except Exception as e:
495 | trans.rollback()
496 | return json.dumps({"error": str(e)}), 409, headers
497 | finally:
498 | conn.close()
499 |
500 |
501 | @app.route("/job", methods=["DELETE"])
502 | def clear_jobs():
503 | """Clear old jobs
504 |
505 |     :param days: how many days of jobs to keep (default: 10)
506 | :type days: integer
507 |
508 | :statuscode 200: no error
509 | :statuscode 403: not authorized to delete jobs
510 | :statuscode 409: an error occurred
511 | """
512 | if not is_authorized():
513 | return json.dumps({"error": "not authorized"}), 403, headers
514 |
515 | days = flask.request.args.get("days", None)
516 | return _clear_jobs(days)
517 |
518 |
519 | def _clear_jobs(days=None):
520 | if days is None:
521 | days = app.config.get("KEEP_JOBS_AGE")
522 | else:
523 | try:
524 | days = int(days)
525 | except Exception as e:
526 | return json.dumps({"error": str(e)}), 409, headers
527 | conn = db.ENGINE.connect()
528 | trans = conn.begin()
529 | date = datetime.datetime.now() - datetime.timedelta(days=days)
530 | try:
531 | conn.execute(
532 | db.JOBS_TABLE.delete().where(db.JOBS_TABLE.c.finished_timestamp < date)
533 | )
534 | trans.commit()
535 | return json.dumps({"success": True}), 200, headers
536 | except Exception as e:
537 | trans.rollback()
538 | return json.dumps({"error": str(e)}), 409, headers
539 | finally:
540 | conn.close()
541 |
542 |
543 | @app.route("/job/<job_id>/data", methods=["GET"])
544 | def job_data(job_id):
545 | """Get the raw data that the job returned. The mimetype
546 |     will be the value provided in the metadata for the key ``mimetype``.
547 |
548 | **Results:**
549 |
550 | :rtype: string
551 |
552 | :statuscode 200: no error
553 | :statuscode 403: not authorized to view the job's data
554 | :statuscode 404: job id not found
555 | :statuscode 409: an error occurred
556 | """
557 | job_dict = db.get_job(job_id)
558 | if not job_dict:
559 | return json.dumps({"error": "job_id not found"}), 404, headers
560 | if not is_authorized(job_dict):
561 | return json.dumps({"error": "not authorized"}), 403, headers
562 | if job_dict["error"]:
563 | return json.dumps({"error": job_dict["error"]}), 409, headers
564 | content_type = job_dict["metadata"].get("mimetype")
565 | return flask.Response(job_dict["data"], mimetype=content_type)
566 |
567 |
568 | @app.route("/job/<job_id>", methods=["POST"])
569 | @app.route("/job", methods=["POST"])
570 | def job(job_id=None):
571 | """Submit a job. If no id is provided, a random id will be generated.
572 |
573 | :param job_type: Which kind of job should be run. Has to be one of the
574 | available job types.
575 | :type job_type: string
576 | :param api_key: An API key that is needed to execute the job. This could
577 | be a CKAN API key that is needed to write any data. The key will also be
578 | used to administer jobs. If you don't want to use a real API key, you can
579 | provide a random string that you keep secure.
580 | :type api_key: string
581 |     :param data: Data that is sent to the job as input. (Optional)
582 | :type data: json encodable data
583 | :param result_url: Callback url that is called once the job has finished.
584 | (Optional)
585 | :type result_url: url string
586 | :param metadata: Data needed for the execution of the job which is not
587 | the input data. (Optional)
588 | :type metadata: list of key - value pairs
589 |
590 | **Results:**
591 |
592 | :rtype: A dictionary with the following keys
593 | :param job_id: An identifier for the job
594 | :type job_id: string
595 | :param job_key: A key that is required to view and administer the job
596 | :type job_key: string
597 |
598 | :statuscode 200: no error
599 | :statuscode 409: an error occurred
600 |
601 | """
602 | if not job_id:
603 | job_id = str(uuid.uuid4())
604 |
605 | # key required for job administration
606 | job_key = str(uuid.uuid4())
607 |
608 | ############# ERROR CHECKING ################
609 | try:
610 | input = flask.request.json
611 | except werkzeug.exceptions.BadRequest:
612 | return json.dumps({"error": "Malformed json"}), 409, headers
613 |
614 |     # Some client libraries send JSON with a malformed content type,
615 |     # so fall back to parsing the raw request body ourselves.
616 | content_type = flask.request.content_type or ""
617 | if not input and "application/json" in content_type.lower():
618 | try:
619 | input = json.loads(flask.request.data)
620 | except ValueError:
621 | pass
622 | if not input:
623 | return (
624 | json.dumps(
625 | {
626 | "error": (
627 | "Not recognised as json, make "
628 | "sure content type is application/"
629 | "json"
630 | )
631 | }
632 | ),
633 | 409,
634 | headers,
635 | )
636 |
637 | ACCEPTED_ARGUMENTS = set(
638 |         ["job_type", "data", "metadata", "result_url", "api_key"]
639 | )
640 | extra_keys = set(input.keys()) - ACCEPTED_ARGUMENTS
641 | if extra_keys:
642 | return (
643 | json.dumps(
644 | {
645 | "error": (
646 | "Too many arguments. Extra keys are {}".format(
647 | ", ".join(extra_keys)
648 | )
649 | )
650 | }
651 | ),
652 | 409,
653 | headers,
654 | )
655 |
656 | # check result_url here as good to give warning early.
657 | result_url = input.get("result_url")
658 | if result_url and not result_url.startswith("http"):
659 | return json.dumps({"error": "result_url has to start with http"}), 409, headers
660 |
661 | job_type = input.get("job_type")
662 | if not job_type:
663 | return json.dumps({"error": "Please specify a job type"}), 409, headers
664 |
665 | job_types = list(async_types.keys()) + list(sync_types.keys())
666 |
667 | if job_type not in job_types:
668 | error_string = ("Job type {} not available. Available job types are {}").format(
669 | job_type, ", ".join(sorted(job_types))
670 | )
671 | return json.dumps({"error": error_string}), 409, headers
672 |
673 | api_key = input.get("api_key")
674 | if not api_key:
675 | return json.dumps({"error": "Please provide your API key."}), 409, headers
676 |
677 | metadata = input.get("metadata", {})
678 | if not isinstance(metadata, dict):
679 | return json.dumps({"error": "metadata has to be a json object"}), 409, headers
680 | ############# END CHECKING ################
681 |
682 | synchronous_job = sync_types.get(job_type)
683 | if synchronous_job:
684 | return run_synchronous_job(synchronous_job, job_id, job_key, input)
685 | else:
686 | asynchronous_job = async_types.get(job_type)
687 | return run_asynchronous_job(asynchronous_job, job_id, job_key, input)
688 |
689 |
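Putting the checks above together, a hedged end-to-end sketch of a submission; the URL, job type, API key and callback are placeholders, and the valid job types can be read from /status:

    import requests

    payload = {
        "job_type": "my_job_type",                     # must be a registered job type
        "api_key": "placeholder-api-key",              # used when posting results back; deleted afterwards
        "data": {"some": "input"},
        "metadata": {"mimetype": "application/json"},  # served back by /job/<job_id>/data
        "result_url": "https://example.com/callback",  # optional; must start with http
    }
    resp = requests.post("http://localhost:8000/job", json=payload)
    body = resp.json()
    print(body["job_id"], body["job_key"])  # keep job_key to query or delete the job later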
690 | def run_synchronous_job(job, job_id, job_key, input):
691 | try:
692 | db.add_pending_job(job_id, job_key, **input)
693 | except sa.exc.IntegrityError as e:
694 | error_string = "job_id {} already exists".format(job_id)
695 | return json.dumps({"error": error_string}), 409, headers
696 |
697 | try:
698 | result = job(job_id, input)
699 |
700 | if hasattr(result, "__call__"):
701 | db.mark_job_as_completed(job_id)
702 | return flask.Response(result(), mimetype="application/json")
703 | else:
704 | db.mark_job_as_completed(job_id, result)
705 |
706 | except util.JobError as e:
707 | db.mark_job_as_errored(job_id, e.as_dict())
708 | except Exception as e:
709 | db.mark_job_as_errored(
710 | job_id, traceback.format_tb(sys.exc_info()[2])[-1] + repr(e)
711 | )
712 |
713 | api_key = db.get_job(job_id)["api_key"]
714 | result_ok = send_result(job_id, api_key)
715 |
716 | if not result_ok:
717 | db.mark_job_as_failed_to_post_result(job_id)
718 |
719 | return job_status(job_id=job_id, show_job_key=True, ignore_auth=True)
720 |
721 |
722 | def run_asynchronous_job(job, job_id, job_key, input):
723 | if not scheduler.running:
724 | scheduler.start()
725 | try:
726 | db.add_pending_job(job_id, job_key, **input)
727 | except sa.exc.IntegrityError:
728 | error_string = "job_id {} already exists".format(job_id)
729 | return json.dumps({"error": error_string}), 409, headers
730 | trigger = DateTrigger()
731 | aps_job = scheduler.add_job(job, trigger, [job_id, input], None)
732 |
733 | db.set_aps_job_id(job_id, aps_job.id)
734 |
735 | return job_status(job_id=job_id, show_job_key=True, ignore_auth=True)
736 |
737 |
738 | def is_authorized(job=None):
739 |     """Return True if the request is authorized for the given job.
740 |     If no job is provided, the user has to be logged in as admin
741 |     to be authorized.
742 | """
743 | if flogin.current_user.is_authenticated:
744 | return True
745 | if job:
746 | job_key = flask.request.headers.get("Authorization")
747 | if job_key == app.config.get("SECRET_KEY"):
748 | return True
749 | return job["job_key"] == job_key
750 | return False
751 |
752 |
753 | def send_result(job_id, api_key=None):
754 | """Send results to where requested.
755 |
756 |     If api_key is provided, it is used, otherwise
757 | the key from the job will be used.
758 | """
759 | job_dict = db.get_job(job_id)
760 | result_url = job_dict.get("result_url")
761 |
762 | if not result_url:
763 |
764 | # A job with an API key (for using when posting to the callback URL)
765 | # but no callback URL is weird, but it can happen.
766 | db.delete_api_key(job_id)
767 |
768 | return True
769 |
770 | api_key_from_job = job_dict.pop("api_key", None)
771 | if not api_key:
772 | api_key = api_key_from_job
773 | headers = {"Content-Type": "application/json"}
774 | if api_key:
775 | if ":" in api_key:
776 | header, key = api_key.split(":")
777 | else:
778 | header, key = "Authorization", api_key
779 | headers[header] = key
780 |
781 | try:
782 | result = requests.post(
783 | result_url,
784 | data=json.dumps(job_dict, cls=DatetimeJsonEncoder),
785 | headers=headers,
786 | verify=SSL_VERIFY,
787 | )
788 |
789 | db.delete_api_key(job_id)
790 |
791 | except requests.ConnectionError:
792 | return False
793 |
794 | return result.status_code == requests.codes.ok
795 |
796 |
797 | def main():
798 | init()
799 | app.run(app.config.get("HOST"), app.config.get("PORT"))
800 |
801 |
802 | if __name__ == "__main__":
803 | main()
804 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 |     <one line to give the program's name and a brief idea of what it does.>
633 |     Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 |     along with this program. If not, see <http://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <http://www.gnu.org/licenses/>.
--------------------------------------------------------------------------------
/tests/test_web.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import json
4 | import time
5 | import logging
6 | import uuid
7 | import threading
8 |
9 | import httpretty
10 | import pytest
11 |
12 | import ckanserviceprovider.web as web
13 | import ckanserviceprovider.job as job
14 | import ckanserviceprovider.util as util
15 | import ckanserviceprovider.db as db
16 |
17 |
18 | # The callback URL that ckanserviceprovider will post to when the
19 | # asynchronous background job finishes. We will mock this.
20 | RESULT_URL = "http://0.0.0.0/ckanserviceprovider/result_url"
21 |
22 |
23 | def configure():
24 | """Configure the Flask app.
25 |
26 | This has to be called just once per test run (not e.g. once for each test).
27 |
28 | """
29 | os.environ["JOB_CONFIG"] = os.path.join(
30 | os.path.dirname(__file__), "settings_test.py"
31 | )
32 | web.init()
33 |
34 |
35 | configure()
36 |
37 |
38 | def reset_db():
39 | """Reset the database and scheduler.
40 |
41 | Should be called after each test.
42 |
43 | """
44 | db.drop_all()
45 | db.init(web.app.config.get("SQLALCHEMY_DATABASE_URI"))
46 | web.init_scheduler(web.app.config.get("SQLALCHEMY_DATABASE_URI"))
47 |
48 |
49 | def test_client():
50 | """Return a test client for the ckanserviceprovider web app."""
51 | return web.app.test_client()
52 |
53 |
54 | def login(client, username="testadmin", password="testpass"):
55 | return client.post(
56 | "/login", data=dict(username=username, password=password), follow_redirects=True
57 | )
58 |
59 |
60 | def _make_request_callback_function(event):
61 | """Return an httpretty request callback function that sets the given event.
62 |
63 | This is a helper function for mock_result_url() below.
64 |
65 | """
66 |
67 | def request_callback(request, uri, headers):
68 | event.set()
69 | return (200, headers, "")
70 |
71 | return request_callback
72 |
73 |
74 | def mock_result_url(result_url):
75 | """Mock the given CKAN Service Provider result URL.
76 |
77 | Returns a threading.Event object that you can use to wait for the mock URL
78 | to be called by doing: event.wait().
79 |
80 | The way it works is:
81 |
82 | * A test method calls this function to mock a result_url, receives a
83 | threading event object in return.
84 |
85 | * The test method posts to ckanserviceprovider passing the mocked
86 | result_url.
87 |
88 | * ckanserviceprovider kicks off an asynchronous background job.
89 |
90 | * The test method waits for ckanserviceprovider's asynchronous background
91 | job to finish by doing event.wait().
92 |
93 | * When the job finishes ckanserviceprovider posts to the result_url.
94 |
95 | * The post is intercepted and redirected to a function that sets the
96 | thread event.
97 |
98 | * event.wait() returns and the test method continues.
99 |
100 | """
101 | event = threading.Event()
102 | request_callback = _make_request_callback_function(event)
103 | httpretty.register_uri(httpretty.POST, result_url, body=request_callback)
104 | return event
105 |
106 |
107 | def _mock_test_callback_url(client):
108 |     """Mock the test callback URL.
109 |
110 | _TEST_CALLBACK_URL is a special URL that CKAN service provider calls after
111 | it has completely finished with an asynchronous job. Waiting for this URL
112 | to be called enables tests to assert things that don't happen until after
113 | the normal client callback URL has been called, without any race conditions
114 | in the tests.
115 |
116 | Returns a threading.Event object that you can use to wait for the mock URL
117 | to be called by doing: event.wait().
118 |
119 | """
120 | test_callback_url = client.application.config.get("_TEST_CALLBACK_URL")
121 | event = threading.Event()
122 | request_callback = _make_request_callback_function(event)
123 | httpretty.register_uri(httpretty.GET, test_callback_url, body=request_callback)
124 | return event
125 |
126 |
127 | def number_of_jobs(client):
128 | """Return the number of jobs that the app has in its database.
129 |
130 | :param client: a test client of the ckanserviceprovider flask app
131 |
132 | """
133 | return len(json.loads(client.get("/job").data)["list"])
134 |
135 |
136 | @job.synchronous
137 | def echo(task_id, input_):
138 | if input_["data"].startswith(">"):
139 | raise util.JobError("Do not start message with >")
140 | if input_["data"].startswith("#"):
141 | raise Exception("Something went totally wrong")
142 | return ">" + input_["data"]
143 |
144 |
145 | @job.synchronous
146 | def echo_raw(task_id, input_):
147 | if input_["data"].startswith(">"):
148 | raise util.JobError("Do not start message with >")
149 |
150 | def raw():
151 | for x in sorted(input_["data"]):
152 | yield x
153 |
154 | return raw
155 |
156 |
157 | @job.asynchronous
158 | def example(task_id, input_):
159 | if "time" not in input_["data"]:
160 | raise util.JobError("time not in input")
161 |
162 | time.sleep(input_["data"]["time"])
163 | return "Slept for " + str(input_["data"]["time"]) + " seconds."
164 |
165 |
166 | @job.asynchronous
167 | def failing(task_id, input_):
168 | time.sleep(0.1)
169 | raise util.JobError("failed")
170 |
171 |
172 | @job.asynchronous
173 | def log(task_id, input_):
174 | handler = util.StoringHandler(task_id, input_)
175 | logger = logging.Logger(task_id)
176 | logger.addHandler(handler)
177 |
178 | logger.warning("Just a warning")
179 |
180 |
181 | class TestWeb(object):
182 | def teardown(self):
183 | reset_db()
184 |
185 | def test_get_job_id_with_limit(self):
186 |         """Getting a job with ?limit should limit the number of logs returned."""
187 | client = test_client()
188 | client.post(
189 | "/job/12345",
190 | data=json.dumps(
191 | {"job_type": "example", "api_key": 42, "data": {"time": 0.1}}
192 | ),
193 | content_type="application/json",
194 | )
195 | db.add_logs(job_id="12345", message="message1")
196 | db.add_logs(job_id="12345", message="message2")
197 | db.add_logs(job_id="12345", message="message3")
198 |
199 | # Make sure it works without limit
200 | response = client.get(
201 | "/job/12345", headers={"Authorization": "please_replace_me"}
202 | )
203 | return_data = json.loads(response.data)
204 | assert len(return_data["logs"]) == 3
205 |
206 | # Now test with limit
207 | response = client.get(
208 | "/job/12345?limit=2", headers={"Authorization": "please_replace_me"}
209 | )
210 | return_data = json.loads(response.data)
211 | assert len(return_data["logs"]) == 2
212 |
213 | def test_status(self):
214 | """/status should return JSON with the app version, job types, etc."""
215 | client = test_client()
216 | response = client.get("/status")
217 | status_data = json.loads(response.data)
218 | status_data.pop("stats")
219 | assert status_data == dict(
220 | version=0.1,
221 | job_types=sorted(["failing", "example", "log", "echo_raw", "echo"]),
222 | name="testing",
223 | )
224 |
225 | def test_content_type(self):
226 | """Pages should have content_type "application/json"."""
227 | client = test_client()
228 | for page in ["/job", "/status", "/job/foo"]:
229 | response = client.get(page)
230 | assert response.content_type == "application/json"
231 |
232 | def test_bad_post(self):
233 | """Invalid posts to /job should receive error messages in JSON."""
234 | client = test_client()
235 | response = client.post("/job", data='{"ffsfsafsa":"moo"}')
236 | try:
237 | assert json.loads(response.data) == {
238 | "error": "Not recognised as json, make sure content type is "
239 | "application/json"
240 | }
241 | except AssertionError:
242 | assert json.loads(response.data) == {"error": "Malformed json"}
243 | except json.JSONDecodeError:
244 | assert response.status_code == 415
245 |
246 | response = client.post(
247 | "/job", data='{"ffsfsafsa":moo}', content_type="application/json"
248 | )
249 | assert json.loads(response.data) == {"error": "Malformed json"}
250 |
251 | response = client.post(
252 | "/job",
253 | data=json.dumps({"api_key": 42, "data": {"time": 5}}),
254 | content_type="application/json",
255 | )
256 | assert json.loads(response.data) == {"error": "Please specify a job type"}
257 |
258 | response = client.post(
259 | "/job",
260 | data=json.dumps({"job_type": "moo", "api_key": 42, "data": {"time": 5}}),
261 | content_type="application/json",
262 | )
263 | assert json.loads(response.data) == {
264 | "error": "Job type moo not available. Available job types are "
265 | "echo, echo_raw, example, failing, log"
266 | }
267 |
268 | response = client.post(
269 | "/job",
270 | data=json.dumps({"job_type": "example", "data": {"time": 5}}),
271 | content_type="application/json",
272 | )
273 | assert json.loads(response.data) == {"error": "Please provide your API key."}
274 |
275 | response = client.post(
276 | "/job",
277 | data=json.dumps(
278 | {"job_type": "example", "api_key": 42, "data": {"time": 5}, "foo": 42}
279 | ),
280 | content_type="application/json",
281 | )
282 | assert json.loads(response.data) == {
283 | "error": "Too many arguments. Extra keys are foo"
284 | }
285 |
286 | def test_asynchronous_post_with_good_job(self):
287 | """A valid post to /job should get back a JSON object with a job ID."""
288 | client = test_client()
289 | response = client.post(
290 | "/job",
291 | data=json.dumps(
292 | {"job_type": "example", "api_key": 42, "data": {"time": 0.1}}
293 | ),
294 | content_type="application/json",
295 | )
296 |
297 | return_data = json.loads(response.data)
298 | assert "job_id" in return_data
299 |
300 | @httpretty.activate
301 | def test_callback_url_is_called_with_api_key(self):
302 | """It should use the API key when posting to the callback URL."""
303 | API_KEY = "42"
304 | client = test_client()
305 | event = threading.Event()
306 |
307 | def callback(request, uri, headers):
308 | assert request.headers.get("Authorization") == API_KEY, (
309 | "ckanserviceprovider should put the API key in the "
310 | "Authorization header when calling the callback URL"
311 | )
312 | event.set()
313 | return (200, headers, "")
314 |
315 | httpretty.register_uri(httpretty.POST, RESULT_URL, body=callback)
316 |
317 | client.post(
318 | "/job",
319 | data=json.dumps(
320 | {
321 | "job_type": "example",
322 | "api_key": API_KEY,
323 | "data": {"time": 0.1},
324 | "result_url": RESULT_URL,
325 | }
326 | ),
327 | content_type="application/json",
328 | )
329 |
330 | timeout = 10.0
331 | assert event.wait(
332 | timeout
333 | ), "result_url was not called within {timeout} seconds".format(timeout=timeout)
334 |
335 | @httpretty.activate
336 | def test_get_job_does_not_return_api_key(self):
337 | """The dict that get_job() returns should not contain the API key."""
338 | client = test_client()
339 | mock_result_url(RESULT_URL)
340 | event = _mock_test_callback_url(client)
341 |
342 | response = client.post(
343 | "/job",
344 | data=json.dumps(
345 | {
346 | "job_type": "example",
347 | "api_key": 42,
348 | "data": {"time": 0.1},
349 | "result_url": RESULT_URL,
350 | }
351 | ),
352 | content_type="application/json",
353 | )
354 | return_data = json.loads(response.data)
355 |
356 | timeout = 10.0
357 | assert event.wait(
358 | timeout
359 | ), "_TEST_CALLBACK_URL was not called within {timeout} " "seconds".format(
360 | timeout=timeout
361 | )
362 |
363 | job_ = db.get_job(return_data["job_id"])
364 | assert not job_["api_key"], job_
365 |
366 | def test_post_job_with_custom_id(self):
367 | """Posting a job with a custom ID should return the ID in the JSON."""
368 | client = test_client()
369 |
370 | response = client.post(
371 | "/job/moo",
372 | data=json.dumps(
373 | {"job_type": "example", "api_key": 42, "data": {"time": 0.1}}
374 | ),
375 | content_type="application/json",
376 | )
377 |
378 | assert json.loads(response.data)["job_id"] == "moo", json.loads(response.data)
379 |
380 | # FIXME: I think there's actually a race condition here - if the
381 | # asynchronous background job (running in another thread) finishes before
382 |     # we get to the assert it'll fail.
383 | def test_get_job_while_pending(self):
384 | """Create a job with a custom ID and get the job while still pending.
385 |
386 | This tests the value of the job's metadata while the job is still in a
387 | pending status.
388 |
389 | """
390 | client = test_client()
391 | client.post(
392 | "/job/moo",
393 | data=json.dumps(
394 | {"job_type": "example", "api_key": 42, "data": {"time": 1}}
395 | ),
396 | content_type="application/json",
397 | )
398 |
399 | login(client)
400 | response = client.get("/job/moo")
401 |
402 | job_status_data = json.loads(response.data)
403 | job_status_data.pop("requested_timestamp")
404 |
405 | job_status_data.pop("aps_job_id")
406 | assert job_status_data == {
407 | "status": "pending",
408 | "sent_data": {"time": 1},
409 | "job_id": "moo",
410 | "finished_timestamp": None,
411 | "job_type": "example",
412 | "error": None,
413 | "data": None,
414 | "metadata": {},
415 | "logs": [],
416 | "result_url": None,
417 | }, job_status_data
418 |
419 | @httpretty.activate
420 | def test_get_job_when_completed(self):
421 | """Get a job with a custom ID after it has completed.
422 |
423 | Tests the value of the job's metadata after the job has completed.
424 |
425 | """
426 | client = test_client()
427 | event = mock_result_url(RESULT_URL)
428 | client.post(
429 | "/job/moo",
430 | data=json.dumps(
431 | {
432 | "job_type": "example",
433 | "api_key": 42,
434 | "data": {"time": 0.1},
435 | "result_url": RESULT_URL,
436 | }
437 | ),
438 | content_type="application/json",
439 | )
440 |
441 | timeout = 10.0
442 | assert event.wait(
443 | timeout
444 | ), "result_url was not called within {timeout} seconds".format(timeout=timeout)
445 |
446 | login(client)
447 |
448 | response = client.get("/job/moo")
449 |
450 | job_status_data = json.loads(response.data)
451 | job_status_data.pop("requested_timestamp")
452 | job_status_data.pop("finished_timestamp")
453 | job_status_data.pop("aps_job_id")
454 |
455 | assert job_status_data == {
456 | "status": "complete",
457 | "sent_data": {"time": 0.1},
458 | "job_id": "moo",
459 | "job_type": "example",
460 | "error": None,
461 | "data": "Slept for 0.1 seconds.",
462 | "metadata": {},
463 | "logs": [],
464 | "result_url": RESULT_URL,
465 | }, job_status_data
466 |
467 | def test_post_job_with_duplicate_custom_id(self):
468 | """Posting a job with a duplicate ID should error."""
469 | client = test_client()
470 | client.post(
471 | "/job/moo",
472 | data=json.dumps(
473 | {"job_type": "example", "api_key": 42, "data": {"time": 0.1}}
474 | ),
475 | content_type="application/json",
476 | )
477 |
478 | response = client.post(
479 | "/job/moo",
480 | data=json.dumps(
481 | {"job_type": "example", "api_key": 42, "data": {"time": 0.1}}
482 | ),
483 | content_type="application/json",
484 | )
485 |
486 | assert json.loads(response.data) == {
487 | "error": "job_id moo already " "exists"
488 | }, json.loads(response.data)
489 |
490 | def test_post_with_job_error(self):
491 | """If a job raises JobError the response should still contain job_id.
492 |
493 | If a job with a custom ID raises JobError then the "job_id" field in
494 | ckanserviceprovider's HTTP response should still contain the job's
495 | custom ID.
496 |
497 | """
498 | # The 'example' job type (defined above) will raise JobError for this
499 | # data because the data has no "time" key.
500 | client = test_client()
501 | response = client.post(
502 | "/job/missing_time",
503 | data=json.dumps({"job_type": "example", "api_key": 42, "data": {}}),
504 | content_type="application/json",
505 | )
506 |
507 | assert json.loads(response.data)["job_id"] == "missing_time", json.loads(
508 | response.data
509 | )
510 |
511 | def test_post_with_job_exception(self):
512 | """If a job raises an exception the HTTP response should have an error.
513 |
514 | If a job raises an arbitrary exception (e.g. because of a mistake in
515 | the job code) the response to the job post HTTP request should have
516 | "exception" instead of the job ID.
517 |
518 | """
519 | client = test_client()
520 | # The 'example' job type (defined above) will crash on this invalid
521 | # time value.
522 | response = client.post(
523 | "/job/exception",
524 | data=json.dumps(
525 | {"job_type": "example", "api_key": 42, "data": {"time": "not_a_time"}}
526 | ),
527 | content_type="application/json",
528 | )
529 |
530 | assert json.loads(response.data)["job_id"] == "exception", json.loads(
531 | response.data
532 | )
533 |
534 | @httpretty.activate
535 | def test_get_job_with_known_error(self):
536 | """Test getting a job that failed with a JobError.
537 |
538 | Among other things, we expect the job dict to have an "error" key with
539 | the error string from the job function as its value.
540 |
541 | """
542 | client = test_client()
543 | mock_result_url(RESULT_URL)
544 | event = _mock_test_callback_url(client)
545 |
546 | response = client.post(
547 | "/job/missing_time",
548 | data=json.dumps(
549 | {
550 | "job_type": "example",
551 | "api_key": 42,
552 | "data": {},
553 | "result_url": RESULT_URL,
554 | }
555 | ),
556 | content_type="application/json",
557 | )
558 |
559 | login(client)
560 |
561 | timeout = 10.0
562 | assert event.wait(
563 | timeout
564 | ), "_TEST_CALLBACK_URL was not called within {timeout} " "seconds".format(
565 | timeout=timeout
566 | )
567 |
568 | response = client.get("/job/missing_time")
569 |
570 | job_status_data = json.loads(response.data)
571 | job_status_data.pop("requested_timestamp")
572 | job_status_data.pop("finished_timestamp")
573 | job_status_data.pop("aps_job_id")
574 | assert job_status_data == {
575 | "status": "error",
576 | "sent_data": {},
577 | "job_id": "missing_time",
578 | "job_type": "example",
579 | "error": {"message": "time not in input"},
580 | "data": None,
581 | "metadata": {},
582 | "logs": [],
583 | "result_url": RESULT_URL,
584 | }, job_status_data
585 |
586 | # get_job() shouldn't return the API key, either.
587 | job_ = db.get_job(job_status_data["job_id"])
588 | assert not job_["api_key"], job_
589 |
590 | @httpretty.activate
591 | def test_get_job_with_unknown_error(self):
592 | """Test getting a job that failed with a random exception.
593 |
594 | A random exception type caused by an error in the job function code,
595 | as opposed to a deliberately raised JobError.
596 |
597 | """
598 | client = test_client()
599 | mock_result_url(RESULT_URL)
600 | event = _mock_test_callback_url(client)
601 |
602 | response = client.post(
603 | "/job/exception",
604 | data=json.dumps(
605 | {
606 | "job_type": "example",
607 | "api_key": 42,
608 | "data": {"time": "not_a_time"},
609 | "result_url": RESULT_URL,
610 | }
611 | ),
612 | content_type="application/json",
613 | )
614 |
615 | login(client)
616 |
617 | timeout = 10.0
618 | assert event.wait(
619 | timeout
620 | ), "_TEST_CALLBACK_URL was not called within {timeout} " "seconds".format(
621 | timeout=timeout
622 | )
623 |
624 | response = client.get("/job/exception")
625 |
626 | job_status_data = json.loads(response.data)
627 | job_status_data.pop("requested_timestamp")
628 | job_status_data.pop("finished_timestamp")
629 | job_status_data.pop("aps_job_id")
630 |
631 | error = job_status_data.pop("error")
632 |
633 | assert job_status_data == {
634 | "status": "error",
635 | "sent_data": {"time": "not_a_time"},
636 | "job_id": "exception",
637 | "job_type": "example",
638 | "data": None,
639 | "metadata": {},
640 | "logs": [],
641 | "result_url": RESULT_URL,
642 | }, job_status_data
643 | assert "TypeError" in error["message"], error["message"]
644 |
645 | # get_job() shouldn't return the API key, either.
646 | job_ = db.get_job(job_status_data["job_id"])
647 | assert not job_["api_key"], job_
648 |
649 | @httpretty.activate
650 | def test_asynchronous_post_with_result_url(self):
651 | """It should post job results to their result URLs.
652 |
653 | If a job has a result_url parameter then when the job finishes
654 | ckanserviceprovider should post the job's result to the result_url.
655 |
656 | """
657 | client = test_client()
658 |
659 | # A thread event that we'll set when the mocked result URL is posted to
660 | event = threading.Event()
661 |
662 | # Mock the result URL.
663 | def result_url(request, uri, headers):
664 | """Handle a request to the mocked result URL."""
665 |
666 | try:
667 | assert request.headers["content-type"] == "application/json", (
668 | "ckanserviceprovider should post to result URLs with "
669 | "content-type application/json"
670 | )
671 |
672 | # Check that the result URL was called with the right data.
673 | data = json.loads(request.body)
674 | data.pop("requested_timestamp")
675 | data.pop("finished_timestamp")
676 | data.pop("job_key")
677 | data.pop("aps_job_id")
678 | assert data == {
679 | "status": "complete",
680 | "sent_data": {"time": 0.1},
681 | "job_id": "with_result",
682 | "job_type": "example",
683 | "result_url": RESULT_URL,
684 | "error": None,
685 | "data": "Slept for 0.1 seconds.",
686 | "metadata": {"key": "value"},
687 | "logs": [],
688 | }
689 | finally:
690 | event.set()
691 | return (200, headers, request.body)
692 |
693 | httpretty.register_uri(httpretty.POST, RESULT_URL, body=result_url)
694 |
695 | response = client.post(
696 | "/job/with_result",
697 | data=json.dumps(
698 | {
699 | "job_type": "example",
700 | "data": {"time": 0.1},
701 | "metadata": {"key": "value"},
702 | "result_url": RESULT_URL,
703 | "api_key": "header:key",
704 | }
705 | ),
706 | content_type="application/json",
707 | )
708 |
709 | # Wait until ckanserviceprovider has posted the result of its
710 | # asynchronous background job to the mocked result URL.
711 | timeout = 10.0
712 | assert event.wait(
713 | timeout
714 | ), "result_url was not posted to within {timeout} seconds".format(
715 | timeout=timeout
716 | )
717 |
718 | login(client)
719 |
720 | response = client.get("/job/with_result")
721 | job_status_data = json.loads(response.data)
722 | job_status_data.pop("requested_timestamp")
723 | job_status_data.pop("finished_timestamp")
724 | job_status_data.pop("aps_job_id")
725 |
726 | assert job_status_data == {
727 | "status": "complete",
728 | "sent_data": {"time": 0.1},
729 | "job_id": "with_result",
730 | "job_type": "example",
731 | "error": None,
732 | "data": "Slept for 0.1 seconds.",
733 | "metadata": {"key": "value"},
734 | "logs": [],
735 | "result_url": RESULT_URL,
736 | }, job_status_data
737 |
738 | @httpretty.activate
739 | def test_asynchronous_post_with_bad_result_url(self):
740 | """It should store an error if given a bad result URL.
741 |
742 | If given an asynchronous job request with a bad result URL
743 | ckanserviceprovider should store a
744 | "Process completed but unable to post to result_url" error.
745 |
746 | This error overwrites any error that might have happened with the job
747 | itself!
748 |
749 | """
750 | client = test_client()
751 | event = _mock_test_callback_url(client)
752 |
753 | httpretty.register_uri(httpretty.POST, RESULT_URL, status=404)
754 |
755 | response = client.post(
756 | "/job/with_bad_result",
757 | data=json.dumps(
758 | {
759 | "job_type": "example",
760 | "api_key": 42,
761 | "data": {"time": 0.1},
762 | "metadata": {"key": "value"},
763 | "result_url": RESULT_URL,
764 | }
765 | ),
766 | content_type="application/json",
767 | )
768 |
769 | timeout = 10.0
770 | assert event.wait(
771 | timeout
772 | ), "_TEST_CALLBACK_URL was not called within {timeout} " "seconds".format(
773 | timeout=timeout
774 | )
775 |
776 | login(client)
777 | response = client.get("/job/with_bad_result")
778 | job_status_data = json.loads(response.data)
779 | job_status_data.pop("requested_timestamp")
780 | job_status_data.pop("finished_timestamp")
781 | job_status_data.pop("aps_job_id")
782 |
783 | assert job_status_data == {
784 | "status": "complete",
785 | "sent_data": {"time": 0.1},
786 | "job_id": "with_bad_result",
787 | "job_type": "example",
788 | "error": {
789 | "message": "Process completed but unable to post to " "result_url"
790 | },
791 | "data": "Slept for 0.1 seconds.",
792 | "metadata": {"key": "value"},
793 | "logs": [],
794 | "result_url": RESULT_URL,
795 | }, job_status_data
796 |
797 | job_ = db.get_job(job_status_data["job_id"])
798 | assert not job_["api_key"], job_
799 |
800 | def test_missing_job_id(self):
801 | """Trying to get a job ID that doesn't exist should return an HTTP 404.
802 |
803 | The response body should be a JSON object containing a not found error.
804 |
805 | """
806 | client = test_client()
807 | response = client.get("/job/not_there")
808 | assert response.status_code == 404, response.status
809 | error = json.loads(response.data)
810 | assert error == {"error": "job_id not found"}
811 |
812 | def test_not_authorized_to_view_job(self):
813 | """Getting a job that you're not authorized to view should 403."""
814 | client = test_client()
815 | response = client.post(
816 | "/job/one_job",
817 | data=json.dumps({"job_type": "echo", "api_key": 42}),
818 | content_type="application/json",
819 | )
820 |
821 | assert response.status_code == 200, response.status
822 | job_status_data = json.loads(response.data)
823 | job_key = job_status_data["job_key"]
824 | response = client.get("/job/one_job")
825 |
826 | assert response.status_code == 403, response.status
827 | error = json.loads(response.data)
828 | assert error == {"error": "not authorized"}
829 |
830 | headers = {"Authorization": job_key}
831 | response = client.get("/job/one_job", headers=headers)
832 | assert response.status_code == 200, response.status
833 |
834 | def test_bad_metadata(self):
835 | """Posting a job with non-JSON metadata should error."""
836 | client = test_client()
837 | response = client.post(
838 | "/job/with_bad_metadata",
839 | data=json.dumps(
840 | {
841 | "job_type": "example",
842 | "api_key": 42,
843 | "data": {"time": 0.1},
844 | "metadata": "meta",
845 | }
846 | ),
847 | content_type="application/json",
848 | )
849 |
850 | return_value = json.loads(response.data)
851 | assert return_value == {
852 | "error": "metadata has to be a " "json object"
853 | }, return_value
854 |
855 | def test_bad_url(self):
856 | """Posting a job with an invalid result_url should error."""
857 | client = test_client()
858 | response = client.post(
859 | "/job/with_bad_result",
860 | data=json.dumps(
861 | {
862 | "job_type": "example",
863 | "api_key": 42,
864 | "data": {"time": 0.1},
865 | "metadata": "meta",
866 | "result_url": "ht//0.0.0.0:9091/resul",
867 | }
868 | ),
869 | content_type="application/json",
870 | )
871 |
872 | return_value = json.loads(response.data)
873 | assert return_value == {
874 | "error": "result_url has to start " "with http"
875 | }, return_value
876 |
877 | @pytest.mark.skip(reason="Can't change misfire_grace_time in APScheduler>3")
878 | @httpretty.activate
879 | def test_misfire(self):
880 | """Jobs should error if not completed within the misfire_grace_time."""
881 | client = test_client()
882 | event = mock_result_url(RESULT_URL)
883 |
884 | web.scheduler.misfire_grace_time = 0.000001
885 | response = client.post(
886 | "/job/misfire",
887 | data=json.dumps(
888 | {
889 | "job_type": "example",
890 | "api_key": 42,
891 | "data": {"time": 0.1},
892 | "metadata": {
893 | "moon": "moon",
894 | "nested": {"nested": "nested"},
895 | "key": "value",
896 | },
897 | "result_url": RESULT_URL,
898 | }
899 | ),
900 | content_type="application/json",
901 | )
902 |
903 | timeout = 10.0
904 | assert event.wait(
905 | timeout
906 | ), "result_url was not called within {timeout} seconds".format(timeout=timeout)
907 |
908 | login(client)
909 | response = client.get("/job/misfire")
910 | job_status_data = json.loads(response.data)
911 | job_status_data.pop("requested_timestamp")
912 | job_status_data.pop("finished_timestamp")
913 | job_status_data.pop("aps_job_id")
914 |
915 | assert job_status_data == {
916 | "status": "error",
917 | "sent_data": {"time": 0.1},
918 | "job_id": "misfire",
919 | "job_type": "example",
920 | "error": {"message": "Job delayed too long, service full"},
921 | "data": None,
922 | "logs": [],
923 | "metadata": {
924 | "key": "value",
925 | "moon": "moon",
926 | "nested": {"nested": "nested"},
927 | },
928 | "result_url": RESULT_URL,
929 | }
930 |
931 | def test_synchronous_raw_post(self):
932 |         """Posting a raw synchronous job should return the result in the response body.
933 |
934 |         The user posts a "raw" synchronous job request; ckan-service-provider runs
935 |         the job and returns an HTTP response with the job result as the body.
936 | (A "raw" job is one whose result is a raw text value rather than JSON
937 | text.)
938 |
939 | """
940 | client = test_client()
941 | response = client.post(
942 | "/job/echoraw",
943 | data=json.dumps(
944 | {
945 | "metadata": {"key": "value", "moo": "moo"},
946 | "job_type": "echo_raw",
947 | "api_key": 42,
948 | "data": "ping",
949 | }
950 | ),
951 | content_type="application/json",
952 | )
953 |
954 | if sys.version_info[0] < 3:
955 | assert response.data == "ginp"
956 | else:
957 | assert response.data == b"ginp"
958 |
959 | def test_synchronous_post(self):
960 |         """Posting a synchronous job should get a JSON response with the result.
961 |
962 |         The user posts a synchronous job request; ckan-service-provider runs the
963 |         job and returns an HTTP response with a JSON body containing the job
964 | result.
965 |
966 | """
967 | client = test_client()
968 | response = client.post(
969 | "/job/echobasic",
970 | data=json.dumps(
971 | {
972 | "metadata": {"key": "value", "moo": "moo", "mimetype": "text/csv"},
973 | "job_type": "echo",
974 | "api_key": 42,
975 | "data": "ping",
976 | }
977 | ),
978 | content_type="application/json",
979 | )
980 |
981 | return_data = json.loads(response.data)
982 | return_data.pop("requested_timestamp")
983 | return_data.pop("finished_timestamp")
984 | return_data.pop("aps_job_id")
985 | job_key = return_data.pop("job_key")
986 |
987 | job_ = db.get_job(return_data["job_id"])
988 | assert not job_["api_key"], job_
989 |
990 | assert return_data == {
991 | "status": "complete",
992 | "sent_data": "ping",
993 | "job_id": "echobasic",
994 | "job_type": "echo",
995 | "result_url": None,
996 | "error": None,
997 | "data": ">ping",
998 | "logs": [],
999 | "metadata": {"key": "value", "moo": "moo", "mimetype": "text/csv"},
1000 | }
1001 |
1002 | login(client)
1003 | response = client.get("/job/echobasic")
1004 | assert response.status_code == 200, response.status
1005 | job_status_data = json.loads(response.data)
1006 | job_status_data.pop("requested_timestamp")
1007 | job_status_data.pop("finished_timestamp")
1008 | job_status_data.pop("aps_job_id")
1009 |
1010 | assert return_data == job_status_data
1011 |
1012 | headers = {"Authorization": job_key}
1013 | response = client.get("/job/echobasic/data", headers=headers)
1014 | assert response.status_code == 200, response.status
1015 |
1016 | if sys.version_info[0] < 3:
1017 | assert response.data == ">ping"
1018 | else:
1019 | assert response.data == b">ping"
1020 | assert "text/csv" in response.content_type, response.content_type
1021 |
1022 | response = client.post(
1023 | "/job/echobasic",
1024 | data=json.dumps({"job_type": "echo", "api_key": 42, "data": "ping"}),
1025 | content_type="application/json",
1026 | )
1027 |
1028 | return_data = json.loads(response.data)
1029 | assert return_data == {"error": "job_id echobasic already exists"}
1030 |
1031 | response = client.post(
1032 | "/job/echoknownbad",
1033 | data=json.dumps({"job_type": "echo", "api_key": 42, "data": ">ping"}),
1034 | content_type="application/json",
1035 | )
1036 | assert response.status_code == 200, response.status
1037 | return_data = json.loads(response.data)
1038 | return_data.pop("requested_timestamp")
1039 | return_data.pop("finished_timestamp")
1040 | return_data.pop("job_key")
1041 | return_data.pop("aps_job_id")
1042 | assert return_data == {
1043 | "status": "error",
1044 | "sent_data": ">ping",
1045 | "job_id": "echoknownbad",
1046 | "job_type": "echo",
1047 | "result_url": None,
1048 | "error": {"message": "Do not start message with >"},
1049 | "data": None,
1050 | "logs": [],
1051 | "metadata": {},
1052 | }
1053 |
1054 | response = client.post(
1055 | "/job/echounknownbad",
1056 | data=json.dumps({"job_type": "echo", "api_key": 42, "data": 1}),
1057 | content_type="application/json",
1058 | )
1059 | return_data = json.loads(response.data)
1060 | assert "AttributeError" in return_data["error"]["message"]
1061 |
1062 | response = client.post(
1063 | "/job/echobad_url",
1064 | data=json.dumps(
1065 | {
1066 | "job_type": "echo",
1067 | "api_key": 42,
1068 | "data": "moo",
1069 | "result_url": "http://bad_url",
1070 | }
1071 | ),
1072 | content_type="application/json",
1073 | )
1074 | return_data = json.loads(response.data)
1075 | return_data.pop("requested_timestamp")
1076 | return_data.pop("finished_timestamp")
1077 | return_data.pop("job_key")
1078 | return_data.pop("aps_job_id")
1079 | assert return_data == {
1080 | "status": "complete",
1081 | "sent_data": "moo",
1082 | "job_id": "echobad_url",
1083 | "job_type": "echo",
1084 | "result_url": "http://bad_url",
1085 | "error": {
1086 | "message": "Process completed but unable to post to " "result_url"
1087 | },
1088 | "data": ">moo",
1089 | "logs": [],
1090 | "metadata": {},
1091 | }
1092 |
1093 | @httpretty.activate
1094 | def test_logging(self):
1095 | """Getting /job/log should return logs from the job as JSON.
1096 |
1097 | Jobs can log messages using a standard logger with a StoringHandler
1098 | attached, and users can retrieve the logged messages using the
1099 | /job/log API.
1100 |
1101 | """
1102 | client = test_client()
1103 | event = mock_result_url(RESULT_URL)
1104 | response = client.post(
1105 | "/job/log",
1106 | data=json.dumps(
1107 | {
1108 | "metadata": {},
1109 | "job_type": "log",
1110 | "api_key": 42,
1111 | "data": "&ping",
1112 | "result_url": RESULT_URL,
1113 | }
1114 | ),
1115 | content_type="application/json",
1116 | )
1117 |
1118 | timeout = 10.0
1119 | assert event.wait(
1120 | timeout
1121 | ), "result_url was not called within {timeout} seconds".format(timeout=timeout)
1122 |
1123 | login(client, username="testadmin", password="wrong")
1124 | response = client.get("/job/log")
1125 | assert response.status_code == 403, response.status
1126 |
1127 | login(client)
1128 | response = client.get("/job/log")
1129 | assert response.status_code == 200, response.status
1130 |
1131 | return_data = json.loads(response.data)
1132 | logs = return_data["logs"]
1133 | assert len(logs) == 1, logs
1134 | log_ = logs[0]
1135 | log_.pop("timestamp")
1136 | log_.pop("lineno")
1137 | assert log_ == {
1138 | "level": "WARNING",
1139 | "module": "test_web",
1140 | "funcName": "log",
1141 | "message": "Just a warning",
1142 | }
1143 |
1144 | def test_delete_job(self):
1145 | """Trying to get the status of a deleted job should return 404.
1146 |
1147 | This also tests that trying to delete a job when you're not authorized
1148 | returns 403.
1149 |
1150 | """
1151 | client = test_client()
1152 | response = client.post(
1153 | "/job/to_be_deleted",
1154 | data=json.dumps(
1155 | {
1156 | "metadata": {"foo": "bar"},
1157 | "job_type": "echo",
1158 | "api_key": 42,
1159 | "data": "&ping",
1160 | }
1161 | ),
1162 | content_type="application/json",
1163 | )
1164 | assert response.status_code == 200, response.status
1165 |
1166 | response = client.delete("/job/to_be_deleted")
1167 | assert response.status_code == 403, response.status
1168 |
1169 | login(client)
1170 | response = client.delete("/job/to_be_deleted")
1171 | assert response.status_code == 200, response.status
1172 |
1173 | response = client.delete("/job/to_be_deleted")
1174 | assert response.status_code == 404, response.status
1175 |
1176 | def test_getting_job_data_for_missing_job(self):
1177 | """Getting the job data for a job that doesn't exist should 404."""
1178 | client = test_client()
1179 | login(client)
1180 | response = client.get("/job/somefoo/data")
1181 | assert response.status_code == 404, response.status
1182 |
1183 | def test_list(self):
1184 | """Tests for /job which should return a list of all the jobs.
1185 |
1186 | Tests the results from getting /job with various different limits and
1187 | filters.
1188 |
1189 | """
1190 | client = test_client()
1191 |
1192 | db.add_pending_job(
1193 | "job_01",
1194 | str(uuid.uuid4()),
1195 | "job_type",
1196 | "result_url",
1197 | "api_key",
1198 | metadata={"key": "value"},
1199 | )
1200 | db.mark_job_as_completed("job_01")
1201 | db.add_pending_job(
1202 | "job_02",
1203 | str(uuid.uuid4()),
1204 | "job_type",
1205 | "result_url",
1206 | "api_key",
1207 | metadata={"key": "value", "moo": "moo"},
1208 | )
1209 | db.mark_job_as_completed("job_02")
1210 | db.add_pending_job(
1211 | "job_03",
1212 | str(uuid.uuid4()),
1213 | "job_type",
1214 | "result_url",
1215 | "api_key",
1216 | metadata={"key": "value", "moo": "moo"},
1217 | )
1218 | db.mark_job_as_completed("job_03")
1219 | db.add_pending_job(
1220 | "job_04",
1221 | str(uuid.uuid4()),
1222 | "job_type",
1223 | "result_url",
1224 | "api_key",
1225 | metadata={"key": "value"},
1226 | )
1227 | db.mark_job_as_completed("job_04")
1228 | db.add_pending_job(
1229 | "job_05", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1230 | )
1231 | db.mark_job_as_completed("job_05")
1232 | db.add_pending_job(
1233 | "job_06", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1234 | )
1235 | db.mark_job_as_completed("job_06")
1236 | db.add_pending_job(
1237 | "job_07", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1238 | )
1239 | db.mark_job_as_completed("job_07")
1240 | db.add_pending_job(
1241 | "job_08", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1242 | )
1243 | db.mark_job_as_completed("job_08")
1244 | db.add_pending_job(
1245 | "job_09", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1246 | )
1247 | db.add_pending_job(
1248 | "job_10", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1249 | )
1250 | db.add_pending_job(
1251 | "job_11", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1252 | )
1253 | db.add_pending_job(
1254 | "job_12", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1255 | )
1256 | db.add_pending_job(
1257 | "job_13", str(uuid.uuid4()), "job_type", "result_url", "api_key"
1258 | )
1259 |
1260 | response = client.get("/job")
1261 | return_data = json.loads(response.data)
1262 | assert len(return_data["list"]) == 13, return_data["list"]
1263 |
1264 | response = client.get("/job?_limit=1")
1265 | return_data = json.loads(response.data)
1266 | assert len(return_data["list"]) == 1, return_data["list"]
1267 |
1268 | response = client.get("/job?_status=complete")
1269 | return_data = json.loads(response.data)
1270 | assert len(return_data["list"]) == 8, return_data["list"]
1271 |
1272 | response = client.get("/job?key=value")
1273 | return_data = json.loads(response.data)
1274 | assert len(return_data["list"]) == 4, return_data["list"]
1275 |
1276 | response = client.get("/job?key=value&moo=moo")
1277 | return_data = json.loads(response.data)
1278 | assert len(return_data["list"]) == 2, return_data["list"]
1279 |
1280 | response = client.get("/job?key=value&moo=moo&moon=moon")
1281 | return_data = json.loads(response.data)
1282 | assert len(return_data["list"]) == 0, return_data["list"]
1283 |
1284 | response = client.get("/job?key=value&moon=moon")
1285 | return_data = json.loads(response.data)
1286 | assert len(return_data["list"]) == 0, return_data["list"]
1287 |
1288 | def test_clear_all(self):
1289 | """Making a DELETE request to /job should delete all jobs.
1290 |
1291 | This also tests the 403 response when you're not authorized to delete,
1292 | and tests the ?days argument.
1293 |
1294 | """
1295 | client = test_client()
1296 |
1297 | # Add some jobs, all completed and therefore eligible for deletion.
1298 | db.add_pending_job(
1299 | "job_01",
1300 | str(uuid.uuid4()),
1301 | "job_type",
1302 | "result_url",
1303 | "api_key",
1304 | metadata={"key": "value"},
1305 | )
1306 | db.mark_job_as_completed("job_01")
1307 | db.add_pending_job(
1308 | "job_02",
1309 | str(uuid.uuid4()),
1310 | "job_type",
1311 | "result_url",
1312 | "api_key",
1313 | metadata={"key": "value", "moo": "moo"},
1314 | )
1315 | db.mark_job_as_completed("job_02")
1316 | db.add_pending_job(
1317 | "job_03",
1318 | str(uuid.uuid4()),
1319 | "job_type",
1320 | "result_url",
1321 | "api_key",
1322 | metadata={"key": "value", "moo": "moo"},
1323 | )
1324 | db.mark_job_as_completed("job_03")
1325 |
1326 | original_number_of_jobs = number_of_jobs(client)
1327 |
1328 | # This should not delete any jobs because not authorized.
1329 | response = client.delete("/job")
1330 | assert response.status_code == 403, response.status
1331 | assert number_of_jobs(client) == original_number_of_jobs
1332 |
1333 | login(client)
1334 |
1335 | # This should not delete any jobs because the jobs aren't old enough.
1336 | response = client.delete("/job")
1337 | assert response.status_code == 200, response.status
1338 | assert number_of_jobs(client) == original_number_of_jobs
1339 |
1340 | # This should delete all the jobs.
1341 | response = client.delete("/job?days=0")
1342 | assert response.status_code == 200, response.status
1343 | assert number_of_jobs(client) == 0
1344 |
--------------------------------------------------------------------------------
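
For reference, the pattern used throughout the tests above — register a job type, configure the Flask app, post to /job with a result_url, and wait for the mocked callback — can be condensed into a small standalone sketch. This is illustrative only and not part of the repository: the "sleeper" job type, the "sketch" job id, and the hard-coded path to tests/settings_test.py are assumptions made for the example, and it assumes httpretty is installed, as in the test suite.

import json
import os
import threading
import time

import httpretty

import ckanserviceprovider.job as job
import ckanserviceprovider.util as util
import ckanserviceprovider.web as web

# Hypothetical callback URL, mocked below just like RESULT_URL in the tests.
RESULT_URL = "http://0.0.0.0/ckanserviceprovider/result_url"


@job.asynchronous
def sleeper(task_id, input_):
    # Same shape as the "example" job type defined in tests/test_web.py.
    if "time" not in input_["data"]:
        raise util.JobError("time not in input")
    time.sleep(input_["data"]["time"])
    return "Slept for " + str(input_["data"]["time"]) + " seconds."


def main():
    # Assumed config path; the test suite points JOB_CONFIG at the same file.
    os.environ["JOB_CONFIG"] = os.path.abspath(
        os.path.join("tests", "settings_test.py")
    )
    web.init()
    client = web.app.test_client()

    # Mock the result URL and set an event when ckanserviceprovider posts to it.
    done = threading.Event()

    def on_result(request, uri, headers):
        done.set()
        return (200, headers, "")

    httpretty.enable()
    try:
        httpretty.register_uri(httpretty.POST, RESULT_URL, body=on_result)
        client.post(
            "/job/sketch",
            data=json.dumps(
                {
                    "job_type": "sleeper",
                    "api_key": 42,
                    "data": {"time": 0.1},
                    "result_url": RESULT_URL,
                }
            ),
            content_type="application/json",
        )
        # Wait for the asynchronous background job to post its result.
        assert done.wait(10.0), "result_url was not called within 10 seconds"
    finally:
        httpretty.disable()


if __name__ == "__main__":
    main()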