5 | AskOmics provides a visual representation of the user abstraction as a graph.
6 | By starting from a node of interest and iteratively selecting its neighbors,
7 | the user creates a path on an abstraction graph. This path can then be transformed
8 | into a SPARQL query that can be executed on the original dataset.
9 |
10 |
11 |
12 | Visit askomics.org to learn how to use and deploy AskOmics.
13 |
--------------------------------------------------------------------------------
/askomics/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {{title}}
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/cli/add_user.py:
--------------------------------------------------------------------------------
1 | """CLI to add a user into the Askomics database"""
2 | import argparse
3 |
4 | from askomics.app import create_app, create_celery
5 | from askomics.libaskomics.LocalAuth import LocalAuth
6 | from askomics.libaskomics.Start import Start
7 |
8 |
class AddUser(object):
    """Insert a new user into the AskOmics database.

    Attributes
    ----------
    application : Flask app
        The AskOmics Flask application
    args : argparse.Namespace
        Parsed command-line arguments
    celery : Celery
        Celery instance bound to the application
    session : dict
        Empty session handed to the AskOmics helpers
    user : dict
        The newly created user (None until main() has run)
    """

    def __init__(self):
        """Parse command-line arguments and bootstrap the application."""
        parser = argparse.ArgumentParser(description="Add a new user into AskOmics database")

        # Mandatory configuration file
        parser.add_argument("-c", "--config-file", type=str, help="AskOmics config file", required=True)

        # User fields, all defaulting to an "admin"-style account
        for flags, helptext, default in (
            (("-f", "--first-name"), "User first name", "Ad"),
            (("-l", "--last-name"), "User last name", "Min"),
            (("-u", "--username"), "User username", "admin"),
            (("-p", "--password"), "User password", "admin"),
            (("-e", "--email"), "User email", "admin@example.org"),
            (("-k", "--api-key"), "User API key", "admin"),
        ):
            parser.add_argument(*flags, type=str, help=helptext, default=default)

        # Optional Galaxy account to attach to the new user
        parser.add_argument("-g", "--galaxy-url", type=str, help="Galaxy URL")
        parser.add_argument("-gk", "--galaxy-apikey", type=str, help="Galaxy API key")

        self.args = parser.parse_args()

        self.application = create_app(config=self.args.config_file)
        self.celery = create_celery(self.application)
        self.session = {}
        self.user = None

        Start(self.application, self.session).start()

    def main(self):
        """Create the user, then attach a Galaxy account if one was provided."""
        new_user = {
            "fname": self.args.first_name,
            "lname": self.args.last_name,
            "username": self.args.username,
            "email": self.args.email,
            "password": self.args.password,
            "apikey": self.args.api_key
        }

        local_auth = LocalAuth(self.application, self.session)

        # Refuse to create the user when the database already has accounts
        if local_auth.get_number_of_users() > 0:
            self.application.logger.error("Database is not empty, user {} will not be created".format(self.args.username))
            return

        self.application.logger.info("Create user {}".format(self.args.username))
        self.user = local_auth.persist_user(new_user)
        self.session["user"] = self.user
        local_auth.create_user_directories(self.user["id"], self.user["username"])

        # Link a Galaxy account only when both the url and the api key were given
        if self.args.galaxy_url and self.args.galaxy_apikey:
            self.user = local_auth.add_galaxy_account(self.user, self.args.galaxy_url, self.args.galaxy_apikey)["user"]
77 |
78 |
if __name__ == '__main__':
    # Script entry point
    AddUser().main()
82 |
--------------------------------------------------------------------------------
/cli/clear_cache.py:
--------------------------------------------------------------------------------
1 | """CLI to clear cache for all users"""
2 | import argparse
3 |
4 | from askomics.app import create_app, create_celery
5 | from askomics.libaskomics.LocalAuth import LocalAuth
6 | from askomics.libaskomics.Start import Start
7 |
8 |
class ClearCache(object):
    """Clear the abstraction cache for all users

    Attributes
    ----------
    application : Flask app
        Flask App
    args : args
        User arguments
    celery : Celery
        Celery
    session : dict
        Empty session
    """

    def __init__(self):
        """Parse command-line arguments and start the application"""
        # Description previously copy-pasted from update_base_url; this CLI
        # clears the abstraction cache
        parser = argparse.ArgumentParser(description="Clear the abstraction cache for all users")

        parser.add_argument("-c", "--config-file", type=str, help="AskOmics config file", required=True)

        self.args = parser.parse_args()

        self.application = create_app(config=self.args.config_file)
        self.celery = create_celery(self.application)
        self.session = {}
        starter = Start(self.application, self.session)
        starter.start()

    def main(self):
        """Clear the abstraction cache for every user"""
        local_auth = LocalAuth(self.application, self.session)
        self.application.logger.info("Clearing abstraction cache")
        local_auth.clear_abstraction_cache()
44 |
45 |
if __name__ == '__main__':
    # Script entry point
    ClearCache().main()
49 |
--------------------------------------------------------------------------------
/cli/clear_cache.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 |
3 | python3 cli/clear_cache.py -c config/askomics.ini
4 |
--------------------------------------------------------------------------------
/cli/config_updater.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import configparser
3 |
4 |
def main():
    """Add or update a single key of an INI config file.

    Command-line arguments:
        -p/--path: path to the config file
        -s/--section: section to add or update
        -k/--key: key to set inside the section
        -v/--value: value for the key
    """
    parser = argparse.ArgumentParser(description='Update AskOmics config file')

    parser.add_argument('-p', '--path', type=str, help='Path to config file', required=True)
    parser.add_argument('-s', '--section', type=str, help='Section to add/update', required=True)
    parser.add_argument('-k', '--key', type=str, help='Key to add into the section', required=True)
    parser.add_argument('-v', '--value', type=str, help='Value of the key', required=True)

    args = parser.parse_args()

    config = configparser.ConfigParser()
    config.read(args.path)

    if args.section not in config.sections():
        config.add_section(args.section)

    config[args.section][args.key] = args.value

    # Context manager guarantees the file handle is closed (the previous
    # config.write(open(path, 'w')) leaked it)
    with open(args.path, 'w') as config_file:
        config.write(config_file)


if __name__ == '__main__':
    main()
34 |
--------------------------------------------------------------------------------
/cli/set_config.sh:
--------------------------------------------------------------------------------
#! /bin/bash

# Build config/askomics.ini from its template, overriding entries with
# ASKO_* environment variables (expected shape: ASKO_<section>_<key>=<value>).

config_template_path="config/askomics.ini.template"
config_path="config/askomics.ini"

# Work on a temporary copy so a half-written config is never visible
tmpfile=$(mktemp /tmp/askomics.ini.XXXXXX)

# Init config file
cp $config_template_path $tmpfile

# Convert env to ini entry
# NOTE(review): the first "_" after the ASKO prefix separates section from
# key, so section names must not contain underscores — confirm
printenv | egrep "ASKO_" | while read setting
do
# Section: first "_"-delimited token after stripping the 5-char "ASKO_" prefix
section=$(echo $setting | egrep -o "^ASKO[^=]+" | sed 's/^.\{5\}//g' | cut -d "_" -f 1)
# Key: remainder after removing the prefix and the section name
key=$(echo $setting | egrep -o "^ASKO[^=]+" | sed 's/^.\{5\}//g' | sed "s/$section\_//g")
# Value: everything after the first "="
value=$(echo $setting | egrep -o "=.*$" | sed 's/^=//g')
# crudini --set ${tmpfile} "${section}" "${key}" "${value}"
python3 cli/config_updater.py -p $tmpfile -s "${section}" -k "${key}" -v "${value}"
done

# config ready, copy to dest
cp $tmpfile $config_path
23 |
--------------------------------------------------------------------------------
/cli/set_user.sh:
--------------------------------------------------------------------------------
#! /bin/bash

# Create the initial AskOmics user from USER_* environment variables.
# Every value falls back to an "admin" default when the variable is unset.
# NOTE(review): the variables are expanded unquoted below, so values
# containing whitespace will break the command line — confirm acceptable.

# Create user (once) if CREATE_USER == true
first_name=$(if [ -z ${USER_FIRST_NAME} ]; then echo "Ad"; else echo "${USER_FIRST_NAME}"; fi)
last_name=$(if [ -z ${USER_LAST_NAME} ]; then echo "Min"; else echo "${USER_LAST_NAME}"; fi)
username=$(if [ -z ${USER_USERNAME} ]; then echo "admin"; else echo "${USER_USERNAME}"; fi)
user_password=$(if [ -z ${USER_PASSWORD} ]; then echo "admin"; else echo "${USER_PASSWORD}"; fi)
email=$(if [ -z ${USER_EMAIL} ]; then echo "admin@example.org"; else echo "${USER_EMAIL}"; fi)
api_key=$(if [ -z ${USER_APIKEY} ]; then echo "admin"; else echo "${USER_APIKEY}"; fi)

if [[ ! -z ${CREATE_USER} ]]; then
if [[ ${CREATE_USER} == "true" ]]; then
# Create default user
# Galaxy account is attached only when both url and api key are provided
galaxy_args=""
if [[ ! -z ${GALAXY_API_KEY} ]]; then
if [[ ! -z ${GALAXY_URL} ]]; then
galaxy_args="-g ${GALAXY_URL} -gk ${GALAXY_API_KEY}"
fi
fi
python3 cli/add_user.py -c config/askomics.ini -f ${first_name} -l ${last_name} -u ${username} -p ${user_password} -e ${email} -k ${api_key} ${galaxy_args}
fi
fi
23 |
--------------------------------------------------------------------------------
/cli/update_base_url.py:
--------------------------------------------------------------------------------
1 | """CLI to update base_url for all graphs"""
2 | import argparse
3 | import sys
4 |
5 | from askomics.app import create_app, create_celery
6 | from askomics.libaskomics.LocalAuth import LocalAuth
7 | from askomics.libaskomics.Start import Start
8 |
9 |
class UpdateUrl(object):
    """Update base_url for all graphs

    Attributes
    ----------
    application : Flask app
        Flask App
    args : args
        User arguments
    celery : Celery
        Celery
    session : dict
        Empty session
    """

    def __init__(self):
        """Parse command-line arguments, validate urls and start the application"""
        parser = argparse.ArgumentParser(description="Update base_url for all graphs")

        parser.add_argument("-c", "--config-file", type=str, help="AskOmics config file", required=True)
        parser.add_argument("-o", "--old_url", type=str, help="Old base url", required=True)
        parser.add_argument("-n", "--new_url", type=str, help="New base url", required=True)

        self.args = parser.parse_args()

        # argparse guarantees the options are present, not that they are non-empty
        if not (self.args.old_url and self.args.new_url):
            print("Error: old_url and new_url must not be empty")
            sys.exit(1)

        self.check_urls(self.args.old_url, self.args.new_url)

        self.application = create_app(config=self.args.config_file)
        self.celery = create_celery(self.application)
        self.session = {}
        starter = Start(self.application, self.session)
        starter.start()

    def check_urls(self, old_url, new_url):
        """Validate both urls, printing an error and exiting with status 1 on the first problem

        Parameters
        ----------
        old_url : str
            Current base url (must be http(s) with a trailing /)
        new_url : str
            Replacement base url (must keep old_url's /data/ or /internal/ suffix)
        """
        if not (old_url.startswith("http://") or old_url.startswith("https://")):
            print("Error: old_url must start with either http:// or https://")
            sys.exit(1)
        if not (new_url.startswith("http://") or new_url.startswith("https://")):
            print("Error: new_url must start with either http:// or https://")
            sys.exit(1)

        if not (old_url.endswith("/") and new_url.endswith("/")):
            print("Error: Both urls must have a trailing /")
            sys.exit(1)

        if old_url.endswith("/data/") and not new_url.endswith("/data/"):
            print("Error: Make sure the new url ends with /data/ too")
            sys.exit(1)

        if old_url.endswith("/internal/") and not new_url.endswith("/internal/"):
            print("Error: Make sure the new url ends with /internal/ too")
            sys.exit(1)

    def main(self):
        """Replace old_url with new_url in all graphs"""
        local_auth = LocalAuth(self.application, self.session)
        self.application.logger.info("Update base url from {} to {}".format(self.args.old_url, self.args.new_url))
        local_auth.update_base_url(self.args.old_url, self.args.new_url)
74 |
75 |
if __name__ == '__main__':
    # Script entry point
    UpdateUrl().main()
79 |
--------------------------------------------------------------------------------
/cli/update_base_url.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 |
3 | echo "This script will update the base url in all graphs."
4 | echo "You can specify either a base url (http://askomics.org/), or a full endpoint (http://askomics.org/data/)"
5 | echo "Make sure to write a valid url (starting with either http:// or https://, and ending with a trailing /)"
6 | echo ""
7 | read -p "Old base url (ex: http://askomics.org/data/) : " OLD_URL
8 | read -p "New base url (ex: http://my_new_url.com/data/) : " NEW_URL
9 |
10 | python3 cli/update_base_url.py -c config/askomics.ini --old_url $OLD_URL --new_url $NEW_URL
11 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
# Stage 1: build AskOmics (install dependencies and build) on the full base image
FROM quay.io/askomics/flaskomics-base:4.6.0-alpine3.19 AS builder
# NOTE(review): MAINTAINER is deprecated — prefer LABEL maintainer=...
MAINTAINER "Xavier Garnier "

COPY . /askomics
WORKDIR /askomics

RUN make clean-config fast-install build

# Final image
# Stage 2: smaller runtime image reusing the build output from stage 1
FROM alpine:3.19

WORKDIR /askomics
# NOTE(review): nodejs/npm are presumably needed at runtime by `make
# serve-askomics` — confirm before trimming
RUN apk add --no-cache make python3 bash git libc-dev libstdc++ nodejs-current npm openldap-dev
COPY --from=builder /askomics .

EXPOSE 5000
CMD make serve-askomics
18 |
--------------------------------------------------------------------------------
/docker/DockerfileAll:
--------------------------------------------------------------------------------
# All-in-one image: AskOmics, Virtuoso, Corese and Redis in a single container.

# Build AskOmics
# NOTE(review): base image 4.0.0-alpine3.13 is older than the 4.6.0-alpine3.19
# used by docker/Dockerfile — confirm this is intentional
FROM quay.io/askomics/flaskomics-base:4.0.0-alpine3.13 AS askomics_builder
# NOTE(review): MAINTAINER is deprecated — prefer LABEL maintainer=...
MAINTAINER "Xavier Garnier "

COPY . /askomics
WORKDIR /askomics

RUN make clean-config fast-install build

# Build Corese
FROM xgaia/corese:20.6.11 AS corese_builder

# Build Virtuoso
FROM askomics/virtuoso:7.2.5.1 AS virtuoso_builder

# Final image
FROM alpine:3.13

# Default configuration, overridable at run time (ASKO_* variables are
# converted into askomics.ini entries by cli/set_config.sh)
ENV MODE="prod" \
NTASKS="5" \
SPARQL_UPDATE=true \
ASKO_triplestore_load_url="http://localhost:5000" \
ASKO_federation_query_engine="corese" \
ASKO_federation_endpoint="http://localhost:8080/sparql" \
PATH="/usr/local/virtuoso-opensource/bin/:$PATH" \
MEM=1G \
CORESE_VERSION=20.6.11

WORKDIR /askomics

# Runtime dependencies: virtuoso layout, java for corese, redis, sqlite and
# the AskOmics python/node stack
RUN apk add --no-cache openssl py-pip && \
pip install crudini && \
mkdir -p /usr/local/virtuoso-opensource/var/lib/virtuoso/db && \
ln -s /usr/local/virtuoso-opensource/var/lib/virtuoso/db /data && \
apk --no-cache add --update openjdk11 --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community && \
mkdir /corese && \
apk add --no-cache redis sqlite && \
apk add --no-cache make python3 bash git libc-dev libstdc++ nodejs-current nodejs openldap-dev

# Assemble the artifacts produced by the build stages
COPY --from=virtuoso_builder /usr/local/virtuoso-opensource /usr/local/virtuoso-opensource
COPY --from=virtuoso_builder /virtuoso /virtuoso
COPY --from=corese_builder /corese /corese
COPY --from=askomics_builder /askomics /askomics

EXPOSE 5000
# start_all.sh boots every service then tails the logs
CMD bash /askomics/docker/start_all.sh
47 |
--------------------------------------------------------------------------------
/docker/DockerfileCelery:
--------------------------------------------------------------------------------
# Stage 1: install AskOmics dependencies (no frontend build target here,
# unlike docker/Dockerfile — only fast-install is run)
FROM quay.io/askomics/flaskomics-base:4.6.0-alpine3.19 AS builder
# NOTE(review): MAINTAINER is deprecated — prefer LABEL maintainer=...
MAINTAINER "Xavier Garnier "

COPY . /askomics
WORKDIR /askomics

RUN make clean-config fast-install

# Final image
FROM alpine:3.19

WORKDIR /askomics
RUN apk add --no-cache make python3 bash git libc-dev libstdc++ openldap-dev
COPY --from=builder /askomics .

# No EXPOSE: this container only runs the background task worker
CMD make serve-celery
17 |
--------------------------------------------------------------------------------
/docker/start_all.sh:
--------------------------------------------------------------------------------
#! /bin/bash

################################################
# #
# Run AskOmics inside the Docker container #
# #
################################################

# Boot every service of the all-in-one image (redis, virtuoso, corese,
# the AskOmics web app and the celery worker), then tail the logs so the
# container stays in the foreground.

cd /askomics

# Start Redis
nohup /usr/bin/redis-server &> /var/log/redis-server.log &

# Start Virtuoso
nohup /virtuoso/virtuoso.sh &> /var/log/virtuoso.log &

# Start Corese
nohup sh /corese/start.sh &> /var/log/corese.log &

# Wait for virtuoso to be up
# (the conductor page only answers once virtuoso accepts connections)
while ! wget -O /dev/null http://localhost:8890/conductor/; do
sleep 1s
done

# Start AskOmics
nohup make serve-askomics &> /var/log/askomics.log &

# Wait for config file to be available
# NOTE(review): presumably written by serve-askomics on first start; celery
# needs it before it can boot — confirm
while [[ ! -f /askomics/config/askomics.ini ]]; do
sleep 1s
done

# Wait a bit more, you never know...
sleep 1s

# Start Celery
nohup make serve-celery &> /var/log/celery.log &

# Surface both logs on stdout and keep the container alive
tail -f /var/log/askomics.log & tail -f /var/log/celery.log
40 |
--------------------------------------------------------------------------------
/docs/abstraction-overview.md:
--------------------------------------------------------------------------------
1 | Starting from 4.5, a new Abstraction button is available on the navigation bar.
2 | This will lead to a graph showing all nodes and the relations available to the user.
3 |
4 | # Abstraction overview
5 |
6 | This graph will only show the data available to the user (ie, only public data for anonymous users)
7 | You will be able to interact with the graph directly by dragging and scrolling.
8 | A 'Reset zoom' button is available to reset the zoom level.
9 |
10 | !!! note "Info"
11 | Clicking on a node will zoom on it.
12 |
13 | Two visualization modes are available: 2D and 3D.
14 |
15 | ## 2D mode
16 |
17 | 2D mode is the default visualization. In this mode, hovering over a node or a relation will highlight all related nodes, and display particles to show the direction of the relation
18 |
19 | {: .center}
20 |
21 | ## 3D mode
22 |
23 | You can toggle 3D mode by clicking on the '2D/3D' toggle at the top of the screen.
24 | In this mode, you can rotate using the left-click, zoom using the mouse wheel, and pan using the right-click.
25 |
26 | !!! warning
27 | There is no node/relation highlighting in this mode, due to performance issues
28 |
29 | {: .center}
30 |
--------------------------------------------------------------------------------
/docs/ci.md:
--------------------------------------------------------------------------------
1 | AskOmics continuous integration includes code linting and unit tests on the Python API. CI is launched automatically on the [askomics](https://github.com/askomics/flaskomics) repository on every pull request. No PR will be merged if the CI fails.
2 |
3 |
4 | # Setup CI environment
5 |
6 | AskOmics CI needs a clean environment. To get it, use `ci/docker-compose.yml` of [flaskomics-docker-compose](https://github.com/askomics/flaskomics-docker-compose). This file will deploy all dependencies on ports specified in `config/askomics.test.ini`.
7 |
8 | ```bash
9 | git clone https://github.com/askomics/flaskomics-docker-compose
10 | cd flaskomics-docker-compose/ci
11 | docker-compose up -d
12 | ```
13 |
14 | # Run CI locally
15 |
16 | First, [install askomics in dev mode](/dev-deployment/#install-askomics).
17 |
18 | Use `make test` to launch the CI.
19 |
--------------------------------------------------------------------------------
/docs/cli.md:
--------------------------------------------------------------------------------
1 | Starting from release 4.3.0, a CLI is available with the [askoclics](https://github.com/askomics/askoclics) python package.
2 | This CLI relies on the AskOmics **API Key**, found in your Account management tab.
3 |
4 | The main goal of the CLI is to help automatize data upload and integration into an existing AskOmics instance.
5 |
6 | Both the python package and the bash command line currently include the following features:
7 |
8 | - File management (Upload, list, preview, integrate, delete)
9 | - Dataset management (List and delete)
10 | - Results management (List, preview results, download results, get sparql query, and delete)
11 | - SPARQL management (Send SPARQL query)
12 |
13 | This library is currently a work in progress.
14 |
--------------------------------------------------------------------------------
/docs/console.md:
--------------------------------------------------------------------------------
1 | A SPARQL console is available through AskOmics, allowing you to send direct SPARQL queries to the endpoint.
2 |
3 | !!! warning
4 | The console access is restricted to **logged users**
5 |
6 | !!! warning
7 | The default AskOmics configuration restricts SPARQL editing and querying to administrators.
8 | This can be disabled with the *enable_sparql_console* configuration option.
9 |
10 | {: .center}
11 |
12 | You can reach this console in two ways:
13 |
14 | # Console access
15 |
16 | - By clicking SPARQL of an existing result in the *Results* page
17 | - The console will be pre-filled with the generated SPARQL query of the result
18 | - Simply heading to the "/sparql" URL
19 | - The console will be pre-filled with a default SPARQL query
20 |
21 | # Editing your query
22 |
23 | You can edit the SPARQL query through the console to customize your query.
24 |
25 | ## Advanced options
26 |
27 | The **Advanced options** tab allows you to customize *how* the query will be sent.
28 | Namely, you will be able to select which endpoints and datasets the query will use, allowing you to fine-tune the query
29 |
30 | - For example, you can exclude some datasets to restrict the results.
31 |
32 | !!! note "Info"
33 | When accessing the console through the "Results" page, the datasets of interest (relevant to the query) will already be selected. Make sure to customize the selection if you modify the query.
34 |
35 | !!! note "Info"
36 | When accessing the console directly, all datasets will be selected (which can increase query time)
37 |
38 | # Launching query
39 |
40 | If you have **editing privileges** (either as an administrator, or through the configuration key), you will be able to either preview or save the query, much like a "normal" query.
41 |
42 | If you save the query, it will appear as a normal result in the "Results" tab. The basic functionalities (templates, download) will be available.
43 |
44 | !!! warning
45 | The Redo button will be disabled for results created from the console
46 |
47 | !!! warning
48 | The generated *template* will redirect to the SPARQL console. It means
49 |
50 | - Non-logged users will not be able to use it
51 | - Only logged users with **editing privileges** will be able to launch the query
52 |
--------------------------------------------------------------------------------
/docs/contribute.md:
--------------------------------------------------------------------------------
1 | # Issues
2 |
3 | If you have an idea for a feature to add or an approach for a bugfix, it is best to communicate with developers early. The most common venues for this are [GitHub issues](https://github.com/askomics/flaskomics/issues/).
4 |
5 | # Pull requests
6 |
7 | All changes to AskOmics should be made through pull requests to [this](https://github.com/askomics/flaskomics) repository.
8 |
9 | [Install AskOmics in development mode](dev-deployment.md), then, create a new branch for your new feature
10 |
11 | ```bash
12 | git checkout -b my_new_feature
13 | ```
14 |
15 | Commit and push your modifications to your [fork](https://help.github.com/articles/pushing-to-a-remote/). If your changes modify code, please ensure that they conform to the [AskOmics style](#coding-style-guidelines)
16 |
17 | Write tests for your changes, and make sure that they [pass](dev-deployment.md#launch-continuous-integration-locally).
18 |
19 | Open a pull request against the `dev` branch of flaskomics. The message of your pull request should describe your modifications (why and how).
20 |
21 | The pull request should pass all the [continuous integration](ci.md) which is automatically run by Github using Travis CI. The coverage must be at least remain the same (but it's better if it increases)
22 |
23 |
24 | ## Coding style guidelines
25 |
26 | ### General
27 |
28 | Ensure all user-enterable strings are unicode capable. Use only English language for everything (code, documentation, logs, comments, ...)
29 |
30 | ### Python
31 |
32 | We follow the [PEP-8](https://www.python.org/dev/peps/pep-0008/) coding convention.
33 |
34 | - Whitespace around operators and inside parentheses
35 | - 4 spaces per indent (not tabs)
36 | - Include docstrings on your modules, class and methods
37 | - Avoid from module import \*. It can cause name collisions that are tedious to track down.
38 | - Class should be in `CamelCase`, methods and variables in `lowercase_with_underscore`
39 |
40 | ### Javascript
41 |
42 | We follow [W3 JavaScript Style Guide and Coding Conventions](https://www.w3schools.com/js/js_conventions.asp)
43 |
--------------------------------------------------------------------------------
/docs/dev-deployment.md:
--------------------------------------------------------------------------------
1 | In development mode, you can deploy AskOmics dependencies with docker-compose, but AskOmics itself should be running locally, on your development machine.
2 |
3 | # Prerequisites
4 |
5 | Install AskOmics dependencies
6 |
7 |
8 | ```bash
9 | # Debian/Ubuntu
10 | sudo apt install -y git python3 python3-venv python3-dev make gcc zlib1g-dev libbz2-dev liblzma-dev g++ npm
11 | # Fedora
12 | sudo dnf install -y git make gcc zlib-devel bzip2-devel xz-devel python3-devel gcc-c++ npm
13 | ```
14 |
15 |
16 | Install `docker`:
17 |
18 | - [Debian](https://docs.docker.com/install/linux/docker-ce/debian/)
19 | - [Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
20 | - [Fedora](https://docs.docker.com/install/linux/docker-ce/fedora/)
21 |
22 | Install `docker-compose`:
23 |
24 | ```bash
25 | # Debian/Ubuntu
26 | apt install -y docker-compose
27 | # Fedora
28 | dnf install -y docker-compose
29 | ```
30 |
31 | # Deploying dependencies
32 |
33 | We provide a `docker-compose` template to run external services used by AskOmics. Clone the [flaskomics-docker-compose](https://github.com/askomics/flaskomics-docker-compose) repository to use it.
34 |
35 | ```bash
36 | git clone https://github.com/askomics/flaskomics-docker-compose.git
37 | ```
38 |
39 | Use the `dev` directory
40 |
41 | ```bash
42 | cd flaskomics-docker-compose/dev
43 | ```
44 |
45 | Deploy dockers
46 |
47 | ```bash
48 | docker-compose up -d
49 | ```
50 |
51 | # Fork and clone AskOmics repository
52 |
53 |
54 | [Fork](https://help.github.com/articles/fork-a-repo/) the AskOmics repository
55 |
56 | then, clone your fork locally
57 |
58 | ```bash
59 | git clone https://github.com/USERNAME/flaskomics.git # replace USERNAME with your github username
60 | ```
61 |
62 | # Deploy AskOmics in development mode
63 |
64 | AskOmics installation and deployment is made with `make`. Use `make help` to see available commands.
65 |
66 | ## Install
67 |
68 | ```bash
69 | make install MODE=dev
70 | ```
71 |
72 | ## Run
73 |
74 | ```bash
75 | make serve MODE=dev NTASKS=10
76 | ```
77 |
--------------------------------------------------------------------------------
/docs/docs.md:
--------------------------------------------------------------------------------
1 | All the documentation (including what you are reading) can be found [here](https://flaskomics.readthedocs.io). Files are on the [AskOmics repository](https://github.com/askomics/flaskomics/tree/master/docs).
2 |
3 | # Serve the documentation locally
4 |
5 | First, [install askomics in dev mode](/dev-deployment/#install-askomics).
6 |
7 | Then, run
8 |
9 | ```bash
10 | make serve-doc
11 | ```
12 | The documentation will be available at [localhost:8000](localhost:8000)
13 |
14 | To change the port, use `make serve-doc DOCPORT=8001`
15 |
--------------------------------------------------------------------------------
/docs/federation.md:
--------------------------------------------------------------------------------
1 | A federated query is a query that involves several SPARQL endpoints. AskOmics uses its own dedicated endpoint for the integrated data, but it is also possible to query external resources.
2 |
3 |
4 | # Define an external endpoint
5 |
6 | The first step is to define an external endpoint. External endpoints have their own description. To display external entities, AskOmics needs the *Abstraction* of the distant endpoint.
7 |
8 | This external abstraction can be built [automatically](#auto-generate-external-abstraction-with-abstractor) or [manually](abstraction.md).
9 |
10 | ## Auto-generate external abstraction with abstractor
11 |
12 | [Abstractor](https://github.com/askomics/abstractor) is a command line tool that auto-generate an abstraction from a distant endpoint.
13 |
14 | ```bash
15 | pip install abstractor
16 | abstractor -e -p -o
17 | ```
18 |
19 | !!! Warning
20 | Abstractor scans everything in the SPARQL endpoint. You may wish to review the generated file and delete unwanted entries.
21 |
22 |
23 | ## Integrate external abstraction into AskOmics
24 |
25 | Once the external endpoint's abstraction is generated, it's time to add it into AskOmics. Upload it and integrate it.
26 | {: .center}
27 |
28 | !!! Warning
29 | Check that `advanced options` > `Distant endpoint` contain URL of the external endpoint
30 |
31 |
32 | # Query external endpoint
33 |
34 | ## Starting entities
35 |
36 | If AskOmics already contains local data, external startpoints are not displayed by default on the start page. Use the `Source` dropdown button to display external entities.
37 |
38 | {: .center}
39 |
40 | ## Linking to your own data
41 |
42 | To link a local dataset to the external endpoint, the file must be structured in a certain way.
43 |
44 | The input file must describe the relation with the external entity. Much like a 'normal' relation, it goes through the header.
45 |
46 | In this case however, instead of simply the entity name, the column name must contain either the full URI or the CURIE of the external entity (e.g *http://nextprot.org/rdf#Gene*). The values of the column must also be the exact uri (full URI or CURIE) of the targeted entity, instead of a raw value.
47 |
48 | For example, the file below describes an entity *gene* that is linked to an external entity *Gene*. The external one is prefixed with the full uri used in the external endpoint. In the values of the column, you will need to also use the full URI / CURIE.
49 |
50 |
51 | gene|value|concern@http://nextprot.org/rdf#Gene
52 | ---|---|---
53 | gene_1|0|http://nextprot.org/rdf/gene/ENSG00000169594
54 | gene_2|1|http://nextprot.org/rdf/gene/ENSG00000156603
55 |
56 |
57 | ## Perform a federated query
58 |
59 | Once the relations are described, links between local and distant entities are automatically created by AskOmics. The query is distributed to the external endpoint and results are returned like a classic query.
60 |
--------------------------------------------------------------------------------
/docs/galaxy.md:
--------------------------------------------------------------------------------
1 | Galaxy is a scientific workflow, data integration, and data and analysis persistence and publishing platform that aims to make computational biology accessible to research scientists that do not have computer programming or systems administration experience.
2 |
3 | A Galaxy Training tutorial is available [here](https://training.galaxyproject.org/training-material/topics/transcriptomics/tutorials/rna-seq-analysis-with-askomics-it/tutorial.html)
4 |
5 | AskOmics can be used with a Galaxy instance in two way:
6 |
7 | - With a dedicated AskOmics, import Galaxy datasets into AskOmics and export AskOmics results into Galaxy.
8 | - In Galaxy: use AskOmics Interactive Tool inside Galaxy
9 |
10 | # Link AskOmics with Galaxy
11 |
12 | ## Create a Galaxy API key
13 |
14 | On your Galaxy account, go to the top menu *User* → *API Keys* and copy your API key. This API key is a unique identifier that will be used for AskOmics to access your data.
15 |
16 |
17 |
18 | ## Enter Galaxy API key into your AskOmics account
19 |
20 | On AskOmics, go to Your Name → Account management → **Connect a Galaxy account** and enter the Galaxy URL and API Key.
21 |
22 | {: .center}
23 |
24 | Once a Galaxy account is added to AskOmics, you can access to all your Galaxy Datasets from AskOmics.
25 |
26 | ## Upload a file from Galaxy
27 |
28 | On the Files page, the Galaxy button can be used to browse Galaxy history and import a dataset.
29 |
30 | ## Send result and query to Galaxy
31 |
32 | On the Results page, the **Actions** column of the table has 2 Galaxy buttons.
33 |
34 | - Send result to Galaxy: Send the result file to the last recently used history
35 | - Send query to Galaxy: send the json graph state that represent the AskOmics query
36 |
37 | ## Import a saved query from Galaxy
38 |
39 | On the Ask! page, the Import Query can be used to import a saved query from Galaxy.
40 |
41 |
42 |
43 |
44 | # Galaxy AskOmics Interactive Tool
45 |
46 |
47 | Galaxy Interactive Tools (GxITs) are a method to run containerized tools that are interactive in nature into the Galaxy interface. AskOmics has its GxIT available on several instances:
48 |
49 | - [usegalaxy.eu](https://usegalaxy.eu)
50 | - [galaxy.genouest.org](https://galaxy.genouest.org)
51 |
52 | ## Launch AskOmics IT
53 |
54 | Search for the AskOmics Interactive tool using the search bar.
55 |
56 | {: .center}
57 |
58 |
59 | Choose input files to automatically upload them into AskOmics
60 |
61 | {: .center}
62 |
63 | !!! Tip
64 | You will be able to add more input files later
65 |
66 | A dedicated AskOmics instance will be deployed into the Cluster. Wait a few minutes and go to the instance using the `click here to display` link.
67 |
68 | {: .center}
69 |
70 | Once you are into your AskOmics instance, you can see your uploaded files into the Files tab.
71 |
72 | {: .center}
73 |
74 | ## Upload additional files
75 |
76 | In addition to the Computer and URL buttons, you can now use the Galaxy button to import datasets from your Galaxy histories
77 |
78 |
79 | {: .center}
80 |
81 | ## Integrate and Query
82 |
83 | Follow the [tutorial](/tutorial#data-integration) to integrate and query your data.
84 |
85 | ## Export Results into your Galaxy history
86 |
87 | Once you have your result, Use the `Send result to Galaxy` to export a TSV file into your last recently used Galaxy history.
88 |
89 | {: .center}
90 |
--------------------------------------------------------------------------------
/docs/img/abstraction_2d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/abstraction_2d.png
--------------------------------------------------------------------------------
/docs/img/abstraction_3d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/abstraction_3d.png
--------------------------------------------------------------------------------
/docs/img/account_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/account_button.png
--------------------------------------------------------------------------------
/docs/img/askogalaxy.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/askogalaxy.png
--------------------------------------------------------------------------------
/docs/img/askograph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/askograph.png
--------------------------------------------------------------------------------
/docs/img/attribute_box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/attribute_box.png
--------------------------------------------------------------------------------
/docs/img/attributes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/attributes.png
--------------------------------------------------------------------------------
/docs/img/complex_query.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/complex_query.png
--------------------------------------------------------------------------------
/docs/img/csv_convert.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/csv_convert.png
--------------------------------------------------------------------------------
/docs/img/custom_nodes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/custom_nodes.png
--------------------------------------------------------------------------------
/docs/img/datasets.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/datasets.png
--------------------------------------------------------------------------------
/docs/img/de_results_preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/de_results_preview.png
--------------------------------------------------------------------------------
/docs/img/external_startpoint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/external_startpoint.png
--------------------------------------------------------------------------------
/docs/img/faldo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/faldo.png
--------------------------------------------------------------------------------
/docs/img/files_table.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/files_table.png
--------------------------------------------------------------------------------
/docs/img/filters.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/filters.png
--------------------------------------------------------------------------------
/docs/img/form.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/form.png
--------------------------------------------------------------------------------
/docs/img/form_edit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/form_edit.png
--------------------------------------------------------------------------------
/docs/img/form_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/form_example.png
--------------------------------------------------------------------------------
/docs/img/galaxy_askomics_files.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_askomics_files.png
--------------------------------------------------------------------------------
/docs/img/galaxy_execute_it.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_execute_it.png
--------------------------------------------------------------------------------
/docs/img/galaxy_history_result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_history_result.png
--------------------------------------------------------------------------------
/docs/img/galaxy_import_from_galaxy.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_import_from_galaxy.png
--------------------------------------------------------------------------------
/docs/img/galaxy_input_data.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_input_data.png
--------------------------------------------------------------------------------
/docs/img/galaxy_search_tool.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/galaxy_search_tool.png
--------------------------------------------------------------------------------
/docs/img/gff.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/gff.png
--------------------------------------------------------------------------------
/docs/img/gff_preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/gff_preview.png
--------------------------------------------------------------------------------
/docs/img/integrate_external.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/integrate_external.png
--------------------------------------------------------------------------------
/docs/img/linked_query.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/linked_query.png
--------------------------------------------------------------------------------
/docs/img/login_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/login_button.png
--------------------------------------------------------------------------------
/docs/img/minus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/minus.png
--------------------------------------------------------------------------------
/docs/img/name_attr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/name_attr.png
--------------------------------------------------------------------------------
/docs/img/navbar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/navbar.png
--------------------------------------------------------------------------------
/docs/img/navbar_files.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/navbar_files.png
--------------------------------------------------------------------------------
/docs/img/num_attr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/num_attr.png
--------------------------------------------------------------------------------
/docs/img/ontology_autocomplete.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/ontology_autocomplete.png
--------------------------------------------------------------------------------
/docs/img/ontology_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/ontology_graph.png
--------------------------------------------------------------------------------
/docs/img/ontology_integration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/ontology_integration.png
--------------------------------------------------------------------------------
/docs/img/ontology_link.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/ontology_link.png
--------------------------------------------------------------------------------
/docs/img/preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/preview.png
--------------------------------------------------------------------------------
/docs/img/preview_results.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/preview_results.png
--------------------------------------------------------------------------------
/docs/img/qtl_preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/qtl_preview.png
--------------------------------------------------------------------------------
/docs/img/query.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/query.png
--------------------------------------------------------------------------------
/docs/img/query_builder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/query_builder.png
--------------------------------------------------------------------------------
/docs/img/results_table.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/results_table.png
--------------------------------------------------------------------------------
/docs/img/sparql.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/sparql.png
--------------------------------------------------------------------------------
/docs/img/startpoint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/startpoint.png
--------------------------------------------------------------------------------
/docs/img/symbol_to_ensembl_preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/symbol_to_ensembl_preview.png
--------------------------------------------------------------------------------
/docs/img/template.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/template.png
--------------------------------------------------------------------------------
/docs/img/tsv.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/tsv.png
--------------------------------------------------------------------------------
/docs/img/union.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/union.png
--------------------------------------------------------------------------------
/docs/img/union_duplicated.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/union_duplicated.png
--------------------------------------------------------------------------------
/docs/img/uri_label_attr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askomics/flaskomics/2c10eef9e34305844b6afdd37a4e508d6ae5a294/docs/img/uri_label_attr.png
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | AskOmics is a visual SPARQL query interface supporting both intuitive data integration and querying while shielding the user from most of the technical difficulties underlying RDF and SPARQL.
2 |
3 | AskOmics helps loading heterogeneous data (formatted as tabular files, GFF, BED or native RDF files) into an RDF triplestore, to be transparently and interactively queried through a user-friendly interface.
4 |
5 | AskOmics also support federated queries to external SPARQL endpoints.
6 |
7 | {: .center}
8 |
9 | - AskOmics Tutorials
10 | - [Overview tutorial](tutorial.md): How to use AskOmics with example data
11 | - [Prepare your data](data.md): How to format your own data for AskOmics
12 | - [Building a query](query.md): Learn how the query building interface works
13 | - [Results interface](results.md): In-depth guide of the results interface
14 | - [Template & forms](template.md): An overview of the templates & forms functionalities
15 | - [SPARQL console](console.md): How to interact with the provided SPARQL console
16 | - [Command-line interface](cli.md): A python-based CLI for interacting with AskOmics
17 | - [Build an RDF abstraction](abstraction.md): Learn how to build an RDF abstraction for RDF data
18 | - [Perform federated queries](federation.md): How to query your own data with external resources
19 | - [Use AskOmics with Galaxy](galaxy.md): How to connect AskOmics with your Galaxy history
20 | - [Link your data to ontologies](ontologies.md): How to add ontologies to AskOmics, and connect your own data
21 |
22 |
23 | - Administration
24 | - [Deploy an instance](production-deployment.md): Deploy an AskOmics instance on your server
25 | - [Configuration](configure.md): Configure your instance
26 | - [Manage](manage.md): Manage your instance
27 | - [Add custom prefixes](prefixes.md): How to add custom prefixes for your users
28 |
29 |
30 | - Developer documentation
31 | - [Deploy a development instance locally](dev-deployment.md)
32 | - [Contribute to AskOmics](contribute.md)
33 | - [CI](ci.md): Test your code with continuous integration
34 | - [Contribute to doc](docs.md): Write documentation
35 |
--------------------------------------------------------------------------------
/docs/manage.md:
--------------------------------------------------------------------------------
1 | # Make commands
2 |
3 | Several commands are available to help manage your instance. These commands are available through `make` when launched from the same directory as the *Makefile*. (If you are running AskOmics in a docker container, you will need to connect to it to launch these commands)
4 |
5 | You can run the `make help` command to get a list of available admin commands.
6 |
7 | # Updating namespaces
8 |
9 | Version 4.2 added the `/data/` route showing the properties linked to a node.
10 | To make sure that your URIs are properly redirecting to this route, you should make sure that the `namespace_data` and `namespace_internal` [configuration option](configure.md) are set to your instance *url*. Make sure to match either `http` or `https` depending on your instance, and don't forget `/data/` or `/internal/`.
11 |
12 | ## Updating an existing instance
13 | If you changed the namespaces after having already integrated some files, you will need to run two additional commands to update your existing data.
14 |
15 | - `make update-base-url`
16 | You will be prompted to enter the previous namespace url, and then the new one.
17 | You can either enter a partial namespace url (ex: `http://askomics.org/`) or the full one (ex: `http://askomics.org/data/`)
18 | In the latter case, you will need to run the command twice (once for each namespace)
19 |
20 | - `make clear-cache`
21 | This will clear the abstraction cache, making sure your data is synchronized with the new namespaces.
22 |
23 | # Single tenant mode
24 |
25 | Starting from release 4.4, the *Single tenant mode* is available through a configuration option.
26 | In Virtuoso, aggregating multiples graphs (using several FROM clauses) can be very costly for big/numerous graphs.
27 |
28 | Single tenant mode sends all queries on all stored graphs, thus speeding up the queries. This means that **all graphs are public, and can be queried by any user**. This affects starting points, abstractions, and queries.
29 |
30 | !!! warning
31 | If you are storing sensitive data on AskOmics, make sure to disable anonymous access and account creation when using *Single tenant mode*.
32 |
33 | !!! warning
34 | *Single tenant mode* has no effect on federated queries
35 |
36 | # Administrator panel
37 |
38 | Administrators have access to a specific panel in AskOmics.
39 | This Admin tab can be found after clicking on *Your Name ▾*.
40 |
41 | ## User management
42 |
43 | From the Admin tab, administrators are able to:
44 |
45 | - Create a new user account
46 | - Manage existing user accounts
47 | - Blocking a user account
48 | - Setting a user as an administrator
49 | - Updating a user's individual storage quota
50 | - Deleting a user
51 |
52 | They will also be able to check the last time of activity of a user.
53 |
54 | ## Files
55 |
56 | A list of all uploaded files is available. Administrators can delete a file at any time.
57 |
58 | ## Datasets
59 |
60 | All currently stored datasets are available. Administrators can publish, unpublish, and delete them.
61 |
62 | ## Forms / Templates
63 |
64 | A list of **public** forms and templates is available. Administrators can unpublish them if need be.
65 |
66 | # Anonymous query
67 |
68 | Starting from release 4.5, the *Anonymous query mode* is available through a configuration option.
69 | This option allows anonymous users to create full queries (not only previews), and access the results/sparql console associated.
70 |
71 | To avoid overloading the server, anonymous queries are regularly deleted. (Every hour for failed queries, and every X days for successful jobs, as defined by the *anonymous_query_cleanup* variable (default 60)).
72 |
73 | !!! warning
74 | Anonymous users cannot create forms/templates, but admin can from the admin panel.
75 | Do keep in mind that anonymous jobs will be deleted at some point.
76 |
77 | !!! warning
78 | If you disable the *anonymous_query*, the job cleaner will not run. You will need to delete the jobs manually from the admin panel.
79 |
--------------------------------------------------------------------------------
/docs/ontologies.md:
--------------------------------------------------------------------------------
1 | Starting from the 4.4 release, hierarchical ontologies (such as the NCBITAXON ontology) can be integrated in AskOmics.
2 | This will allow users to query on an entity, or on its ancestors and descendants
3 |
4 | # Registering an ontology (admin-only)
5 |
6 | !!! warning
7 | While not required for basic queries (and subClassOf queries), registering an ontology is required for enabling auto-completion, using non-default labels (ie: *skos:prefLabel*), and enable an integration shortcut for users.
8 |
9 |
10 | First, make sure to have the [abstraction file](/abstraction/#ontologies) ready. Upload it to AskOmics, and integrate it.
11 | Make sure *to set it public*.
12 |
13 | You can then head to Ontologies in the user tab. There, you will be able to create and delete ontologies.
14 |
15 | ## Creating an ontology
16 |
17 | Parameters to create an ontology are as follows:
18 |
19 | * Ontology name: the full name of the ontology: will be displayed as a column type when integrating CSV files.
20 | * Ontology short name: the shortname of the ontology (ex: NCBITAXON). /!\ When using ols autocompleting, this needs to match an existing ols ontology
21 | * Ontology uri: The ontology uri in your abstraction file
22 | * Linked public dataset: The *public* dataset containing your classes (not necessarily your abstraction)
23 | * Label uri: The label predicate your classes are using. Defaults to rdfs:label
24 | * Autocomplete type: If local, autocomplete will work with a SPARQL query (local or federated). If OLS, it will be sent on the OLS endpoint.
25 |
26 | # Linking your data to an ontology
27 |
28 | This functionality will only work with CSV files. You will need to fill out a column with the terms uris.
29 | If the ontology has been registered, you can directly select the ontology's column type.
30 |
31 | {: .center}
32 |
33 | Else, you will need to set the header as you would for a relation, using the ontology uri as the remote entity.
34 |
35 | Ex: `is organism@http://purl.bioontology.org/ontology/NCBITAXON`
36 |
37 | # Querying data using ontological terms
38 |
39 | If your entity is linked to an ontology, the ontology will appear as a related entity on the graph view.
40 | From there, you will be able to directly print the linked term's attributes (label, or other)
41 |
42 | {: .center}
43 |
44 | If the ontology was registered (and an autocompletion type was selected), the label field will have autocompletion (starting after 3 characters).
45 |
46 | {: .center}
47 |
48 | ## Querying on hierarchical relations
49 |
50 | You can also query on a related term, to build queries such as :
51 |
52 | * Give me all entities related to the children of this term
53 | * Give me all entities related to any ancestor of this term
54 |
55 | To do so, simply click on the linked ontology circle, fill out the required label (or other attribute), and click on the link between both ontologies to select the type of query (either *children of*, *descendants of*, *parents of*, *ancestors of*)
56 |
57 | {: .center}
58 |
59 | !!! warning
60 | The relation goes from the second ontology circle to the first. Thus, to get the *children of* a specific term, you will need to select the *children of* relation, and select the label on the **second** circle
61 |
--------------------------------------------------------------------------------
/docs/prefixes.md:
--------------------------------------------------------------------------------
1 | Starting from the 4.4 release, custom prefixes can be added in the administration UI.
2 | These prefixes can be used by non-admin users when integrating CSV files (for specifying URIs, for instance)
3 |
4 | # Registering a prefix (admin-only)
5 |
6 | You can head to Prefixes in the user tab. There, you will be able to create and delete custom prefixes.
7 |
8 | ## Creating a custom prefix
9 |
10 | Simply fill out the desired prefix (ex: *wikibase*), and namespace: (ex: *http://wikiba.se/ontology#*).
11 | Users will be able to fill out data using the wikibase:XXX format.
12 |
--------------------------------------------------------------------------------
/docs/production-deployment.md:
--------------------------------------------------------------------------------
1 | In production, AskOmics is deployed using docker and docker-compose. `docker-compose.yml` templates are provided to deploy your own instance.
2 |
3 | # Prerequisites
4 |
5 | Install `git`
6 |
7 | ```bash
8 | # Debian/Ubuntu
9 | apt install -y git
10 | # Fedora
11 | dnf install -y git
12 | ```
13 |
14 | Install `docker`:
15 |
16 | - [Debian](https://docs.docker.com/install/linux/docker-ce/debian/)
17 | - [Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
18 | - [Fedora](https://docs.docker.com/install/linux/docker-ce/fedora/)
19 |
20 | Install `docker-compose`:
21 |
22 | ```bash
23 | # Debian/Ubuntu
24 | apt install -y docker-compose
25 | # Fedora
26 | dnf install -y docker-compose
27 | ```
28 |
29 | # Deploy
30 |
31 | ## Download templates
32 |
33 | First, clone the [flaskomics-docker-compose](https://github.com/askomics/flaskomics-docker-compose) repository. It contains template files to deploy your AskOmics instance.
34 |
35 |
36 | ```bash
37 | git clone https://github.com/askomics/flaskomics-docker-compose.git
38 | ```
39 |
40 | This repo contains several directories, depending on your needs
41 |
42 | ```bash
43 | cd flaskomics-docker-compose
44 | ls -1
45 | ```
46 |
47 | Two directories are used for production deployment
48 |
49 | - `standalone`: deploy AskOmics with all its dependencies for a standalone usage
50 | - `federated`: deploy AskOmics with all its dependencies and a federated query engine for a federated usage (query across external endpoints such as [NeXtProt](https://sparql.nextprot.org) together with local data)
51 |
52 | Choose one of these directories depending on your needs
53 |
54 | ```bash
55 | cd federated
56 | ```
57 | ## Configure
58 |
59 | First, edit the `docker-compose.yml` file. You can change the main port:
60 |
61 | - `services` > `nginx` > `ports`: You can change the default port if `80` is already used on your machine. Example: `"8080:80"` to use `8080` instead of `80`.
62 |
63 | ### Virtuoso
64 |
65 | Then, configure virtuoso by editing `virtuoso.env`
66 |
67 | Edit `VIRT_Parameters_NumberOfBuffers` and `VIRT_Parameters_MaxDirtyBuffers` following rules described [here](https://github.com/askomics/flaskomics-docker-compose#configure-virtuoso).
68 |
69 | !!! warning
70 |     Change the `DBA_PASSWORD` if you plan to expose the virtuoso endpoint. The password has to be the same as `askomics.env` > `ASKO_triplestore_password`
71 |
72 | ### Nginx (web proxy)
73 |
74 | Nginx is used to manage web redirection. Nginx configuration is in two files: `nginx.conf` and `nginx.env`. If you want to access the virtuoso endpoint, uncomment the `virtuoso` section in `nginx.conf`
75 |
76 |
77 | ### AskOmics
78 |
79 | All properties defined in `askomics.ini` can be configured via the environment variables in `askomics.env`. The environment variable should be prefixed with `ASKO_` and have a format like `ASKO_$SECTION_$KEY`. $SECTION and $KEY are case sensitive. *E.g.* property `footer_message` in the `askomics` section should be configured as `ASKO_askomics_footer_message=Welcome to my AskOmics!`.
80 |
81 | !!! warning
82 | Change `ASKO_flask_secret_key` and `ASKO_askomics_password_salt` to random string
83 |
84 | For more information about AskOmics configuration, see the [configuration](configure.md) section.
85 |
86 | #### First user
87 |
88 | Environment variables can also be used to create a user in AskOmics at first start. For this, use `CREATE_USER=true`. User information can be configured with the following environment variables:
89 |
90 | - `USER_FIRST_NAME`: User first name (default: Ad)
91 | - `USER_LAST_NAME`: User last name (default: Min)
92 | - `USER_USERNAME`: Username (default: admin)
93 | - `USER_PASSWORD`: Password (default: admin)
94 | - `USER_EMAIL`: User email (default: admin@example.com)
95 | - `USER_APIKEY`: User API key (default: admin)
96 | - `GALAXY_API_KEY`: User Galaxy API Key (optional)
97 | - `GALAXY_URL`: Galaxy URL linked to the user (optional)
98 |
99 | The user will be created only if the users table of the database is empty.
100 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | mkdocs==1.6.0
2 | markdown-captions==2.1.2
3 | jinja2==3.1.4
4 |
--------------------------------------------------------------------------------
/docs/results.md:
--------------------------------------------------------------------------------
1 | On the Results page, you will be able to see all your saved results (after using the Run & save button). Each row stores both the query and its results.
2 |
3 | # General information
4 |
5 | Result information can be found for each row :
6 |
7 | - Creation date: The creation time of this result
8 | - Exec time: The running time of the linked query
9 | - Status: Current status of the query
10 | - Possible values are 'Success', 'Queued', 'Started', 'Failure' and 'Deleting'
11 | - Rows: Number of rows in the result
12 | - Size: Size of the result file
13 |
14 | ## Description
15 |
16 | Each description can be customized by clicking on the field, and entering the desired value. You can use this to identify the query related to this result.
17 |
18 | !!! Warning
19 | Don't forget to save your new description using **Enter**
20 |
21 | !!! note "Info"
22 | The description will be displayed on the main page if you transform this query in a [template or form](template.md).
23 |
24 | # Templates and forms
25 |
26 | You can use the available toggle buttons if you wish to create a [template or form](template.md).
27 |
28 | !!! Warning
29 | Form creation is restricted to administrators. The related query must also be a [form-able query](template.md#Forms).
30 |
31 | # Publication
32 |
33 | The 'Public' toggle is available if you are an administrator. It will automatically create a public form (if the result is form-able), or a template. They will be accessible to **all users** from the main page.
34 |
35 | !!! Tip
36 | Make sure to set a custom description (and [customize your form](template.md#editing-the-form-display), if relevant) to help users understand your template/form.
37 |
38 | # Actions
39 |
40 | Several actions are also available for each result :
41 |
42 | ## Preview
43 |
44 | Preview directly re-launches the related query, and prints a preview of the results.
45 | The preview will be shown under the results table.
46 |
47 | ## Download
48 |
49 | Clicking on Download will let you download a CSV file containing the results.
50 |
51 | ## Form
52 |
53 | Clicking on Form will let you customize the related form display.
54 |
55 | !!! Warning
56 |     Only available for administrators and form-able results.
57 |
58 | ## Redo
59 |
60 | Clicking on Redo will let you directly replay the query from the query interface. It will be in the exact same state as when you clicked on Run & save.
61 |
62 | !!! Warning
63 |     Only available for results generated from the query interface.
64 |
65 | ## Sparql
66 |
67 | Clicking on Sparql will redirect you to the [SPARQL console](console.md). You will be able to browse the SPARQL code generated by your query.
68 |
69 | !!! note "Info"
70 | Depending on your AskOmics configuration, you might be able to directly customize the query and launch it from the console.
71 |
72 | {: .center}
73 |
74 | # Deletion
75 |
76 | To delete one or more results, simply select them in the table, and use the "Delete" button at the bottom of the table.
77 |
78 | !!! Warning
79 | This will delete any template or form generated from the result.
80 |
--------------------------------------------------------------------------------
/docs/style.css:
--------------------------------------------------------------------------------
1 | navbar {
2 | background-color: #343a40;
3 | color: rgba(255,255,255,.75);
4 | padding-right: 5px;
5 | padding-left: 5px;
6 | border-radius: 5px;
7 | }
8 |
9 | askolink {
10 | color: #0056b3
11 | }
12 |
13 | btn {
14 | background-color: #5a6268;
15 | color: #fff;
16 | padding-right: 5px;
17 | padding-left: 5px;
18 | border-radius: 5px;
19 | }
20 |
21 | btn.white {
22 | border: 2px solid #6c757d;
23 | background-color: white;
24 | color: #6c757d;
25 | cursor: pointer;
26 | }
27 |
28 | btn.white:hover {
29 | color:white;
30 | background: #6c757d;
31 | }
32 |
33 | badge {
34 | padding-right: 5px;
35 | padding-left: 5px;
36 | border-radius: 5px;
37 | }
38 |
39 |
40 |
41 | badge.local {
42 | color: black;
43 | background: rgb(216, 212, 168);
44 | }
45 |
46 | badge.hands-on {
47 | color: white;
48 | background: #53ba11b3;
49 | }
50 |
51 | .admonition.hands-on {
52 | background: rgba(100,221,23,.1);
53 | padding: 12px;
54 | line-height: 24px;
55 | margin-bottom: 24px;
56 | }
57 |
58 | div.admonition.hands-on > p.admonition-title {
59 | background: #53ba11b3
60 | }
61 |
62 | .admonition.quote {
63 | background: #e3e3e3;
64 | padding: 12px;
65 | line-height: 24px;
66 | margin-bottom: 24px;
67 | }
68 |
69 | div.admonition.quote > p.admonition-title {
70 | background: #9e9e9e
71 | }
72 |
73 | .primary {
74 | color: #007bff;
75 | }
76 |
77 | .center {
78 | display: block;
79 | max-width: 100%;
80 | height: auto;
81 | margin: auto;
82 | float: none!important;
83 | text-align: center;
84 | font-style: italic;
85 | }
86 |
87 | .fa { display:inline; }
88 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: AskOmics
2 | repo_url: https://github.com/askomics/flaskomics
3 | edit_uri: edit/master/docs/
4 | site_author: Xavier Garnier
5 |
6 | theme:
7 | name: readthedocs
8 | highlightjs: true
9 | hljs_languages:
10 | - bash
11 | - python
12 | - javascript
13 | markdown_extensions:
14 | - admonition
15 | - attr_list
16 | - markdown_captions
17 | - toc:
18 | permalink: true
19 | toc_depth: 1
20 |
21 |
22 | nav:
23 | - Home: index.md
24 | - AskOmics tutorials:
25 | - Overview tutorial: tutorial.md
26 | - Preparing data: data.md
27 | - Building a query: query.md
28 | - Managing results: results.md
29 | - Templates & forms: template.md
30 | - SPARQL console: console.md
31 | - Command-line interface: cli.md
32 | - Build RDF Abstraction: abstraction.md
33 | - Federated queries: federation.md
34 | - AskOmics and Galaxy: galaxy.md
35 | - AskOmics and ontologies: ontologies.md
36 | - Abstraction overview: abstraction-overview.md
37 | - Administrator guide:
38 | - Deploy AskOmics: production-deployment.md
39 | - Configure: configure.md
40 | - Manage: manage.md
41 | - Custom prefixes: prefixes.md
42 | - Developer guide:
43 | - Dev deployment: dev-deployment.md
44 | - Contribute: contribute.md
45 | - CI: ci.md
46 | - Documentation: docs.md
47 | - Bug Tracker: https://github.com/askomics/flaskomics/issues
48 | - Project monitoring: https://github.com/askomics/flaskomics/projects
49 |
50 | extra_css: [style.css]
51 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "scripts": {
3 | "prod": "webpack --mode production",
4 | "dev": "webpack --mode development --watch"
5 | },
6 | "keywords": [
7 | "Semantic web",
8 | "ontology"
9 | ],
10 | "name": "AskOmics",
11 | "version": "4.5.0",
12 | "description": "Visual SPARQL query builder",
13 | "author": "Xavier Garnier",
14 | "license": "AGPL-3.0",
15 | "repository": {
16 | "type": "git",
17 | "url": "https://github.com/askomics/askomics.git"
18 | },
19 | "devDependencies": {
20 | "@babel/core": "^7.11.6",
21 | "@babel/preset-env": "^7.11.5",
22 | "@babel/preset-react": "^7.10.4",
23 | "babel-eslint": "^10.1.0",
24 | "babel-loader": "^8.1.0",
25 | "eslint": "^7.9.0",
26 | "eslint-config-standard": "^14.1.1",
27 | "eslint-plugin-import": "^2.22.0",
28 | "eslint-plugin-node": "^11.1.0",
29 | "eslint-plugin-promise": "^4.2.1",
30 | "eslint-plugin-react": "^7.20.6",
31 | "eslint-plugin-standard": "^4.0.1",
32 | "npm-check-updates": "^16.3.4",
33 | "terser-webpack-plugin": "^4.2.2",
34 | "webpack": "^5.94.0",
35 | "webpack-cli": "^5.1.4"
36 | },
37 | "dependencies": {
38 | "@fortawesome/fontawesome-free": "^5.14.0",
39 | "@fortawesome/free-brands-svg-icons": "^5.14.0",
40 | "@fortawesome/free-regular-svg-icons": "^5.14.0",
41 | "@sentry/browser": "^7.119.1",
42 | "axios": "^1.6.0",
43 | "babel-preset-env": "^1.7.0",
44 | "bootstrap": "^4.5.2",
45 | "css-loader": "^6.8.1",
46 | "dedent": "^0.7.0",
47 | "dompurify": "^2.0.17",
48 | "file-loader": "^6.1.0",
49 | "history": "^5.0.0",
50 | "immutability-helper": "^3.1.1",
51 | "js-file-download": "^0.4.12",
52 | "pretty-time": "^1.1.0",
53 | "prismjs": "^1.27.0",
54 | "qs": "^6.10.3",
55 | "rc-switch": "^4.1.0",
56 | "react": "^16.13.1",
57 | "react-ace": "^9.1.3",
58 | "react-addons-update": "^15.6.3",
59 | "react-autosuggest": "^10.1.0",
60 | "react-bootstrap-table-next": "^4.0.3",
61 | "react-bootstrap-table2-editor": "^1.4.0",
62 | "react-bootstrap-table2-paginator": "^2.1.2",
63 | "react-contextmenu": "^2.14.0",
64 | "react-datepicker": "^3.6.0",
65 | "react-dom": "^16.13.1",
66 | "react-force-graph": "^1.39.2",
67 | "react-force-graph-3d": "^1.22.0",
68 | "react-resize-detector": "^5.2.0",
69 | "react-router": "^5.2.0",
70 | "react-router-dom": "^5.2.0",
71 | "react-simple-code-editor": "^0.11.0",
72 | "react-sizeme": "^3.0.2",
73 | "react-syntax-highlighter": "^15.4.3",
74 | "react-tooltip": "^5.13.1",
75 | "reactstrap": "^8.6.0",
76 | "style-loader": "^1.2.1",
77 | "three": "^0.150.1",
78 | "three-spritetext": "^1.6.5",
79 | "url-loader": "^4.1.0"
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pipenv
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | with open('requirements.txt') as f:
4 | requires = f.read().splitlines()
5 |
6 | setup(
7 | name='askomics',
8 | version='4.5.0',
9 | description='''
10 | AskOmics is a visual SPARQL query interface supporting both intuitive
11 | data integration and querying while shielding the user from most of the
12 | technical difficulties underlying RDF and SPARQL
13 | ''',
14 | classifiers=[
15 | "Programming Language :: Python",
16 | "Framework :: Flask",
17 | "Topic :: Internet :: WWW/HTTP",
18 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
19 | ],
20 | maintainer='Mateo Boudet',
21 | maintainer_email='mateo.boudet@inrae.fr',
22 | url='https://github.com/askomics/flaskomics',
23 | keyword='rdf sparql query data integration',
24 | packages=find_packages(),
25 | include_package_data=True,
26 | install_requires=requires,
27 | )
28 |
--------------------------------------------------------------------------------
/test-data/abstraction.nt:
--------------------------------------------------------------------------------
1 | .
2 | .
3 | .
4 | .
5 | "Uniprot Subcellular Location Cv" .
6 | .
7 | .
8 | .
9 | .
10 | "Annotation Type" .
11 |
--------------------------------------------------------------------------------
/test-data/abstraction.ttl:
--------------------------------------------------------------------------------
1 | @prefix askomics: .
2 | @prefix ns1: .
3 | @prefix rdfs: .
4 | @prefix xsd: .
5 |
6 | a askomics:AskomicsRelation,
7 | ;
8 | rdfs:label "absorption max" ;
9 | rdfs:domain ;
10 | rdfs:range .
11 |
--------------------------------------------------------------------------------
/test-data/abstraction.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/test-data/agro_min.ttl:
--------------------------------------------------------------------------------
1 | @prefix askomics: .
2 | @prefix owl: .
3 | @prefix rdf: .
4 | @prefix rdfs: .
5 | @prefix xml: .
6 | @prefix xsd: .
7 | @prefix ns1: .
8 |
9 |
10 | a askomics:ontology ;
11 | a owl:ontology ;
12 | rdfs:label "AGRO".
13 |
14 | [] a owl:ObjectProperty ;
15 | a askomics:AskomicsRelation ;
16 | askomics:uri rdfs:subClassOf ;
17 | rdfs:label "subClassOf" ;
18 | rdfs:domain ;
19 | rdfs:range .
20 |
21 |
22 | a owl:Class ;
23 | rdfs:label "desuckering" ;
24 | rdfs:subClassOf .
25 |
26 | a owl:Class ;
27 | rdfs:label "irrigation water source role" ;
28 | rdfs:subClassOf .
29 |
30 | a owl:Class ;
31 | rdfs:label "irrigation water quantity" ;
32 | rdfs:subClassOf ,
33 | .
34 |
35 | a owl:Class ;
36 | rdfs:label "reduced tillage process" ;
37 | rdfs:subClassOf .
38 |
39 | a owl:Class ;
40 | rdfs:label "laser land levelling process" ;
41 | rdfs:subClassOf .
42 |
43 | a owl:Class ;
44 | rdfs:label "chemical pest control process" ;
45 | rdfs:subClassOf .
46 |
47 | a owl:Class ;
48 | rdfs:label "no-till" ;
49 | rdfs:subClassOf ,
50 | .
51 |
52 | a owl:Class ;
53 | rdfs:label "puddling process" ;
54 | rdfs:subClassOf .
55 |
56 | a owl:Class ;
57 | rdfs:label "mulch-till" ;
58 | rdfs:subClassOf .
59 |
60 | a owl:Class ;
61 | rdfs:label "ridge-till" ;
62 | rdfs:subClassOf .
63 |
64 | a owl:Class ;
65 | rdfs:label "strip-till" ;
66 | rdfs:subClassOf .
67 |
68 | a owl:Class ;
69 | rdfs:label "aerial application" ;
70 | rdfs:subClassOf .
71 |
--------------------------------------------------------------------------------
/test-data/de.tsv:
--------------------------------------------------------------------------------
1 | DifferentialExpression concern@transcript logFC FC PValue Expression Significance FDR Leaves/Leaves Roots/Roots
2 | DE001 AT3G10490 -11.5741248327485 3049.00929422056 1.3364869977085e-09 LeavesRoots 1 9.70851435297687e-07 26590.0333537811 81.2619011254912
5 | DE004 AT1G57800 9.20759897504639 591.239552393754 4.99876947134223e-10 Leaves>Roots 1 9.70851435297687e-07 11700.3434320124 19.656255689533
6 | DE005 AT1G49500 -10.3470410152063 1302.47594721622 1.57559426781879e-09 Leaves\nPREFIX askomics: \nPREFIX dc: \nPREFIX dcat: \nPREFIX faldo: \nPREFIX owl: \nPREFIX prov: \nPREFIX rdf: \nPREFIX rdfs: \nPREFIX skos: \nPREFIX xsd: \n\nSELECT DISTINCT ?s ?p ?o\nWHERE {\n ?s ?p ?o\n}\n",
3 | "diskSpace": ###SIZE###,
4 | "console_enabled": true,
5 | "endpoints": {
6 | "###LOCAL_ENDPOINT###": {
7 | "name": "local triplestore",
8 | "selected": true,
9 | "uri": "###LOCAL_ENDPOINT###"
10 | }
11 | },
12 | "error": false,
13 | "errorMessage": "",
14 | "graphs": {
15 | "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###": {
16 | "name": "de.tsv",
17 | "selected": true,
18 | "uri": "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###"
19 | },
20 | "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###": {
21 | "name": "gene.bed",
22 | "selected": true,
23 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###"
24 | },
25 | "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###": {
26 | "name": "gene.gff3",
27 | "selected": true,
28 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
29 | },
30 | "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###": {
31 | "name": "qtl.tsv",
32 | "selected": true,
33 | "uri": "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###"
34 | },
35 | "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###": {
36 | "name": "transcripts.tsv",
37 | "selected": true,
38 | "uri": "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###"
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/tests/results/preview.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": false,
3 | "errorMessage": "",
4 | "header": [
5 | "transcript1_Label"
6 | ],
7 | "id": 1,
8 | "preview": [
9 | {
10 | "transcript1_Label": "label_AT1G57800"
11 | },
12 | {
13 | "transcript1_Label": "label_AT5G35334"
14 | },
15 | {
16 | "transcript1_Label": "label_AT3G10460"
17 | },
18 | {
19 | "transcript1_Label": "label_AT1G49500"
20 | },
21 | {
22 | "transcript1_Label": "label_AT3G10490"
23 | },
24 | {
25 | "transcript1_Label": "label_AT3G51470"
26 | },
27 | {
28 | "transcript1_Label": "label_AT5G41905"
29 | },
30 | {
31 | "transcript1_Label": "label_AT1G33615"
32 | },
33 | {
34 | "transcript1_Label": "label_AT3G22640"
35 | },
36 | {
37 | "transcript1_Label": "label_AT3G13660"
38 | },
39 | {
40 | "transcript1_Label": "AT1G01010.1"
41 | },
42 | {
43 | "transcript1_Label": "AT1G01010.2"
44 | },
45 | {
46 | "transcript1_Label": "AT1G01010.3"
47 | }
48 | ]
49 | }
50 |
--------------------------------------------------------------------------------
/tests/results/preview_malformed_files.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": false,
3 | "errorMessage": "",
4 | "previewFiles": [
5 | {
6 | "data": {
7 | "columns_type": [
8 | "start_entity",
9 | "text",
10 | "text"
11 | ],
12 | "content_preview": [],
13 | "header": [
14 | "",
15 | "column2",
16 | "column3"
17 | ]
18 | },
19 | "id": 6,
20 | "name": "malformed.tsv",
21 | "type": "csv/tsv",
22 | "error": true,
23 | "error_message": "Malformated CSV/TSV (Empty column in header)"
24 | }
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/tests/results/query.sparql:
--------------------------------------------------------------------------------
1 | PREFIX :
2 | PREFIX askomics:
3 | PREFIX dc:
4 | PREFIX faldo:
5 | PREFIX owl:
6 | PREFIX prov:
7 | PREFIX rdf:
8 | PREFIX rdfs:
9 | PREFIX xsd:
10 |
11 | SELECT DISTINCT ?transcript1_Label
12 | WHERE {
13 |
14 | ?transcript1_uri rdf:type .
15 | ?transcript1_uri rdfs:label ?transcript1_Label .
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/tests/results/result.csv:
--------------------------------------------------------------------------------
1 | Gene1_Label
2 | AT001
3 | AT001
4 | AT002
5 | AT002
6 | AT003
7 | AT003
8 | AT004
9 | AT004
10 | AT005
11 | AT005
12 | BN001
13 | BN001
14 | BN002
15 | BN002
16 | BN003
17 | BN003
18 |
--------------------------------------------------------------------------------
/tests/results/results_admin.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": false,
3 | "errorMessage": "",
4 | "queries": [
5 | {
6 | "description": "###DESC###",
7 | "end": ###END###,
8 | "execTime": ###EXECTIME###,
9 | "id": ###ID###,
10 | "nrows": 13,
11 | "public": ###PUBLIC###,
12 | "size": ###SIZE###,
13 | "start": ###START###,
14 | "status": "success",
15 | "user": "jsmith"
16 | }
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
/tests/results/results_linked_uri.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": false,
3 | "errorMessage": "",
4 | "headerPreview": [
5 | "linked_uri1_Label",
6 | "test_uri4_uri",
7 | "test_uri1_Label"
8 | ],
9 | "resultsPreview": [
10 | {
11 | "linked_uri1_Label": "luri1",
12 | "test_uri1_Label": "myuri",
13 | "test_uri4_uri": "https://myuri.com/myuri"
14 | },
15 | {
16 | "linked_uri1_Label": "luri2",
17 | "test_uri1_Label": "myuri2",
18 | "test_uri4_uri": "http://www.w3.org/1999/02/22-rdf-syntax-ns#myuri2"
19 | },
20 | {
21 | "linked_uri1_Label": "luri3",
22 | "test_uri1_Label": "myuri3",
23 | "test_uri4_uri": "http://askomics.org/test/data/myuri3"
24 | }
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/tests/results/results_uri.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": false,
3 | "errorMessage": "",
4 | "headerPreview": [
5 | "test_uri1_uri",
6 | "test_uri1_Label"
7 | ],
8 | "resultsPreview": [
9 | {
10 | "test_uri1_Label": "myuri",
11 | "test_uri1_uri": "https://myuri.com/myuri"
12 | },
13 | {
14 | "test_uri1_Label": "myuri2",
15 | "test_uri1_uri": "http://www.w3.org/1999/02/22-rdf-syntax-ns#myuri2"
16 | },
17 | {
18 | "test_uri1_Label": "myuri3",
19 | "test_uri1_uri": "http://askomics.org/test/data/myuri3"
20 | },
21 | {
22 | "test_uri1_Label": "myuri4",
23 | "test_uri1_uri": "http://askomics.org/test/data/wrongprefix%3Amyuri4"
24 | }
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/tests/results/sparql_preview.json:
--------------------------------------------------------------------------------
1 | {
2 | "data": [
3 | {
4 | "transcript1_Label": "label_AT3G13660"
5 | },
6 | {
7 | "transcript1_Label": "label_AT3G10460"
8 | },
9 | {
10 | "transcript1_Label": "label_AT3G51470"
11 | },
12 | {
13 | "transcript1_Label": "label_AT5G35334"
14 | },
15 | {
16 | "transcript1_Label": "label_AT3G10490"
17 | },
18 | {
19 | "transcript1_Label": "label_AT3G22640"
20 | },
21 | {
22 | "transcript1_Label": "label_AT1G57800"
23 | },
24 | {
25 | "transcript1_Label": "label_AT1G49500"
26 | },
27 | {
28 | "transcript1_Label": "label_AT1G33615"
29 | },
30 | {
31 | "transcript1_Label": "label_AT5G41905"
32 | }
33 | ],
34 | "header": [
35 | "transcript1_Label"
36 | ]
37 | }
38 |
--------------------------------------------------------------------------------
/tests/results/sparql_query.json:
--------------------------------------------------------------------------------
1 | {
2 | "diskSpace": ###SIZE###,
3 | "endpoints": {
4 | "###LOCAL_ENDPOINT###": {
5 | "name": "local triplestore",
6 | "selected": true,
7 | "uri": "###LOCAL_ENDPOINT###"
8 | }
9 | },
10 | "console_enabled": true,
11 | "error": false,
12 | "errorMessage": "",
13 | "graphs": {
14 | "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###": {
15 | "name": "de.tsv",
16 | "selected": false,
17 | "uri": "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###"
18 | },
19 | "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###": {
20 | "name": "qtl.tsv",
21 | "selected": false,
22 | "uri": "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###"
23 | },
24 | "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###": {
25 | "name": "transcripts.tsv",
26 | "selected": true,
27 | "uri": "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###"
28 | },
29 | "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###": {
30 | "name": "gene.bed",
31 | "selected": false,
32 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###"
33 | },
34 | "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###": {
35 | "name": "gene.gff3",
36 | "selected": true,
37 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
38 | }
39 | },
40 | "query": "PREFIX : \nPREFIX askomics: \nPREFIX dc: \nPREFIX dcat: \nPREFIX faldo: \nPREFIX owl: \nPREFIX prov: \nPREFIX rdf: \nPREFIX rdfs: \nPREFIX skos: \nPREFIX xsd: \n\nSELECT DISTINCT ?transcript1_Label\nWHERE {\n ?transcript1_uri rdf:type .\n ?transcript1_uri rdfs:label ?transcript1_Label .\n\n\n\n}\n"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/results/startpoints.json:
--------------------------------------------------------------------------------
1 | {
2 | "error":
3 | false,
4 | "errorMessage": "",
5 | "publicQueries": [],
6 | "publicFormQueries": [],
7 | "startpoints":
8 | [{
9 | "endpoints": [{
10 | "name": "local",
11 | "url": "http://localhost:8891/sparql"
12 | }, {
13 | "name": "local",
14 | "url": "http://localhost:8891/sparql"
15 | }],
16 | "entity":
17 | "http://askomics.org/test/data/transcript",
18 | "entity_label":
19 | "transcript",
20 | "graphs":
21 | [{
22 | "creator": "jdoe",
23 | "public": "false",
24 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
25 | },
26 | {
27 | "creator": "jdoe",
28 | "public": "false",
29 | "uri":
30 | "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###"
31 | }],
32 | "private":
33 | true,
34 | "public":
35 | false
36 | },
37 | {
38 | "endpoints": [{
39 | "name": "local",
40 | "url": "http://localhost:8891/sparql"
41 | }, {
42 | "name": "local",
43 | "url": "http://localhost:8891/sparql"
44 | }],
45 | "entity":
46 | "http://askomics.org/test/data/gene",
47 | "entity_label":
48 | "gene",
49 | "graphs": [{
50 | "creator":
51 | "jdoe",
52 | "public":
53 | "false",
54 | "uri":
55 | "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###"
56 | },
57 | {
58 | "creator":
59 | "jdoe",
60 | "public":
61 | "false",
62 | "uri":
63 | "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
64 | }],
65 | "private":
66 | true,
67 | "public":
68 | false
69 | },
70 | {
71 | "endpoints": [{
72 | "name": "local",
73 | "url": "http://localhost:8891/sparql"
74 | }],
75 | "entity":
76 | "http://askomics.org/test/data/DifferentialExpression",
77 | "entity_label":
78 | "DifferentialExpression",
79 | "graphs": [{
80 | "creator": "jdoe",
81 | "public": "false",
82 | "uri": "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###"
83 | }],
84 | "private":
85 | true,
86 | "public":
87 | false
88 | },
89 | {
90 | "endpoints": [{
91 | "name": "local",
92 | "url": "http://localhost:8891/sparql"
93 | }],
94 | "entity":
95 | "http://askomics.org/test/data/QTL",
96 | "entity_label":
97 | "QTL",
98 | "graphs": [{
99 | "creator": "jdoe",
100 | "public": "false",
101 | "uri": "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###"
102 | }],
103 | "private":
104 | true,
105 | "public":
106 | false
107 | }]
108 | }
109 |
--------------------------------------------------------------------------------
/tests/test_api_data.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from . import AskomicsTestCase
4 |
5 |
6 | class TestApiData(AskomicsTestCase):
7 | """Test AskOmics API /data/"""
8 |
9 | def test_get_uri(self, client):
10 | """test the /data/ route"""
11 | client.create_two_users()
12 | client.log_user("jdoe")
13 | client.upload_and_integrate()
14 |
15 | with open("tests/results/data_full.json", "r") as file:
16 | file_content = file.read()
17 | expected = json.loads(file_content)
18 |
19 | response = client.client.get('/api/data/AT3G10490')
20 |
21 | assert response.status_code == 200
22 | assert self.equal_objects(response.json, expected)
23 |
24 | def test_get_empty_uri(self, client):
25 | """test the /data/ route for an empty uri"""
26 | response = client.client.get('/api/data/random_uri')
27 |
28 | expected_empty_response = {
29 | 'data': [],
30 | 'error': False,
31 | 'errorMessage': ""
32 | }
33 |
34 | assert response.status_code == 200
35 | assert response.json == expected_empty_response
36 |
37 | def test_uri_access(self, client):
38 | """test the /data/ route for public and non public uris"""
39 | client.create_two_users()
40 | client.log_user("jdoe")
41 | client.upload_and_integrate()
42 |
43 | expected_empty_response = {
44 | 'data': [],
45 | 'error': False,
46 | 'errorMessage': ""
47 | }
48 |
49 | client.log_user("jsmith")
50 | response = client.client.get('/api/data/AT3G10490')
51 |
52 | assert response.status_code == 200
53 | assert response.json == expected_empty_response
54 |
55 | def test_public_access(self, client):
56 | """test the /data/ route for public and non public uris"""
57 | client.create_two_users()
58 | client.log_user("jdoe")
59 | client.upload_file("test-data/transcripts.tsv")
60 |
61 | client.integrate_file({
62 | "id": 1,
63 | "columns_type": ["start_entity", "label", "category", "text", "reference", "start", "end", "category", "strand", "text", "text", "date"]
64 | }, public=True)
65 |
66 | with open("tests/results/data_public.json", "r") as file:
67 | file_content = file.read()
68 | expected = json.loads(file_content)
69 |
70 | client.logout()
71 | response = client.client.get('/api/data/AT3G10490')
72 |
73 | assert response.status_code == 200
74 | assert self.equal_objects(response.json, expected)
75 |
--------------------------------------------------------------------------------
/tests/test_api_ontology.py:
--------------------------------------------------------------------------------
1 | from . import AskomicsTestCase
2 |
3 |
class TestApiOntology(AskomicsTestCase):
    """Test AskOmics API /api/ontology/<name>/autocomplete"""

    def test_local_autocompletion_protected(self, client):
        """Autocompletion requires authentication when the instance is protected"""
        query = "blabla"
        client.set_config("askomics", "protect_public", "true")
        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))

        # Anonymous access must be rejected, with no results leaked
        assert response.status_code == 401
        assert len(response.json["results"]) == 0

    def test_local_autocompletion_missing_ontology(self, client):
        """Autocompletion on an unknown ontology returns 404 and no results"""
        query = "blabla"
        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))

        assert response.status_code == 404
        assert len(response.json["results"]) == 0

    def test_local_autocompletion(self, client):
        """test /api/ontology/AGRO/autocomplete route"""
        client.create_two_users()
        client.log_user("jdoe")

        client.create_ontology()

        # A query matching no ontology term returns an empty result list
        query = "blabla"
        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))

        assert response.status_code == 200
        assert response.json["results"] == []

        # An empty query returns every term of the ontology
        query = ""
        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))

        expected = [
            "desuckering",
            "irrigation water source role",
            "irrigation water quantity",
            "reduced tillage process",
            "laser land levelling process",
            "chemical pest control process",
            "no-till",
            "puddling process",
            "mulch-till",
            "ridge-till",
            "strip-till",
            "aerial application"
        ]

        assert response.status_code == 200
        assert len(response.json["results"]) == 12

        # SPARQL order is not reliable, so we make sure to return everything
        # If it fails, skip this
        assert self.equal_objects(response.json["results"], expected)

        # A partial query returns only the matching terms
        query = "irrigation"
        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))

        expected = [
            "irrigation water source role",
            "irrigation water quantity"
        ]

        assert response.status_code == 200
        assert len(response.json["results"]) == 2
        assert self.equal_objects(response.json["results"], expected)
74 |
--------------------------------------------------------------------------------
/tests/test_cleanup.py:
--------------------------------------------------------------------------------
1 | import datetime
3 | from askomics.libaskomics.ResultsHandler import ResultsHandler
4 | from . import AskomicsTestCase
5 |
6 |
class TestCleanup(AskomicsTestCase):
    """Test deletion of old results (cleanup of the results table)

    Registered users' results must survive cleanup; anonymous users'
    results older than the threshold must be deleted.
    """

    @staticmethod
    def _create_old_result(client, status=None):
        """Create a result whose start date is one day in the past"""
        start = int((datetime.date.today() - datetime.timedelta(1)).strftime("%s"))
        if status is not None:
            client.create_result(status=status, start=start)
        else:
            client.create_result(start=start)

    def test_cleanup_failed(self, client):
        """Test that users failed jobs are NOT deleted"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_and_integrate()

        # Add an old failed job
        self._create_old_result(client, status="error")

        rh = ResultsHandler(client.app, client.session)
        results = rh.get_files_info()

        assert len(results) == 1
        rh.delete_older_results(1, "hour", "0", "error")
        results = rh.get_files_info()

        # Job belongs to a registered user: still there
        assert len(results) == 1

    def test_cleanup_success(self, client):
        """Test that users jobs are NOT deleted"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_and_integrate()

        # Add an old successful job
        self._create_old_result(client)

        rh = ResultsHandler(client.app, client.session)
        results = rh.get_files_info()

        assert len(results) == 1
        rh.delete_older_results(1, "hour", "0")
        results = rh.get_files_info()

        # Job belongs to a registered user: still there
        assert len(results) == 1

    def test_anon_cleanup_failed(self, client):
        """Test that anon failed jobs are deleted"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_and_integrate(public=True)
        client.logout()

        client.log_anon()
        # Add an old failed job owned by the anonymous user
        self._create_old_result(client, status="error")

        rh = ResultsHandler(client.app, client.session)
        results = rh.get_files_info()

        assert len(results) == 1
        rh.delete_older_results(1, "hour", "0", "error")
        results = rh.get_files_info()

        # Anonymous job older than the threshold: deleted
        assert len(results) == 0

    def test_anon_cleanup_success(self, client):
        """Test that anon jobs are deleted"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_and_integrate(public=True)
        client.logout()

        client.log_anon()
        # Add an old successful job owned by the anonymous user
        self._create_old_result(client)

        rh = ResultsHandler(client.app, client.session)
        results = rh.get_files_info()

        assert len(results) == 1
        rh.delete_older_results(1, "hour", "0")
        results = rh.get_files_info()

        # Anonymous job older than the threshold: deleted
        assert len(results) == 0
89 |
--------------------------------------------------------------------------------
/tests/test_uri.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from . import AskomicsTestCase
4 |
5 |
class TestURIResults(AskomicsTestCase):
    """Test correct URI interpretation"""

    @staticmethod
    def _load_json(path):
        """Read a JSON file and return the parsed object"""
        with open(path) as file:
            return json.load(file)

    def test_uri(self, client):
        """Test entity uri interpretation"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_file("test-data/uris.csv")

        client.integrate_file({
            "id": 1,
            "columns_type": ["start_entity", "text"]
        })

        json_query = self._load_json("tests/data/uri_query.json")
        expected = self._load_json("tests/results/results_uri.json")

        response = client.client.post('/api/query/preview', json=json_query)

        assert response.status_code == 200
        assert self.equal_objects(response.json, expected)

    def test_linked_uri(self, client):
        """Test linked uri interpretation"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_file("test-data/uris.csv")
        client.upload_file("test-data/linked_uris.csv")

        client.integrate_file({
            "id": 1,
            "columns_type": ["start_entity", "text"]
        })

        client.integrate_file({
            "id": 2,
            "columns_type": ["start_entity", "general_relation"]
        })

        json_query = self._load_json("tests/data/linked_uri_query.json")
        expected = self._load_json("tests/results/results_linked_uri.json")

        response = client.client.post('/api/query/preview', json=json_query)

        assert response.status_code == 200
        assert self.equal_objects(response.json, expected)
67 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | const webpack = require('webpack');
2 | const TerserPlugin = require('terser-webpack-plugin');
3 |
4 | module.exports = (env, argv) => ({
5 | entry: [
6 | './askomics/react/src/index.jsx'
7 | ],
8 | module: {
9 | rules: [
10 | {
11 | test: /\.(js|jsx)$/,
12 | exclude: /node_modules/,
13 | use: ['babel-loader']
14 | },
15 | {
16 | test: /\.css$/,
17 | use: ['style-loader', 'css-loader']
18 | }
19 | ]
20 | },
21 | output: {
22 | path: __dirname + '/askomics/static/js',
23 | filename: 'askomics.js'
24 | },
25 | resolve: {
26 | extensions: ['.js', '.jsx'],
27 | },
28 | optimization: {
29 | minimize: (argv.mode === 'production') ? true : false,
30 | minimizer: [new TerserPlugin()],
31 | },
32 | });
33 |
34 | // module.exports = config;
35 |
--------------------------------------------------------------------------------