26 | )
27 | }
28 | }
29 |
30 | OntoLinkView.propTypes = {
31 | link: PropTypes.object,
32 | handleRecursiveOntology: PropTypes.func
33 | }
34 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AskOmics
2 |
3 | [](https://github.com/askomics/flaskomics/actions?query=workflow%3A%22Lint+and+test%22)
4 | [](https://coveralls.io/github/askomics/flaskomics?branch=master)
5 | [](https://hub.docker.com/r/askomics/flaskomics/)
6 | [](https://flaskomics.readthedocs.io/en/latest/?badge=latest)
7 |
8 | Rebuild of [AskOmics](https://github.com/askomics/askomics)
9 |
10 | 
11 |
12 | AskOmics is a visual SPARQL query interface supporting both intuitive data integration and querying while shielding the user from most of the technical difficulties underlying RDF and SPARQL.
13 |
14 |
15 | ## Documentation
16 |
17 | All documentation, including installation instructions, is [here](https://flaskomics.readthedocs.io/en/latest/)
18 | A Galaxy Training tutorial is available [here](https://training.galaxyproject.org/training-material/topics/transcriptomics/tutorials/rna-seq-analysis-with-askomics-it/tutorial.html)
19 |
--------------------------------------------------------------------------------
/test-data/abstraction.nt:
--------------------------------------------------------------------------------
1 | .
2 | .
3 | .
4 | .
5 | "Uniprot Subcellular Location Cv" .
6 | .
7 | .
8 | .
9 | .
10 | "Annotation Type" .
11 |
--------------------------------------------------------------------------------
/docs/abstraction-overview.md:
--------------------------------------------------------------------------------
1 | Starting from 4.5, a new Abstraction button is available on the navigation bar.
2 | This will lead to a graph showing all nodes and the relations available to the user.
3 |
4 | # Abstraction overview
5 |
6 | This graph will only show the data available to the user (i.e., only public data for anonymous users)
7 | You will be able to interact with the graph directly by dragging and scrolling.
8 | A 'Reset zoom' button is available to reset the zoom level.
9 |
10 | !!! note "Info"
11 | Clicking on a node will zoom on it.
12 |
13 | Two visualization modes are available: 2D and 3D.
14 |
15 | ## 2D mode
16 |
17 | 2D mode is the default visualization. In this mode, hovering over a node or a relation will highlight all related nodes, and display particles to show the direction of the relation
18 |
19 | {: .center}
20 |
21 | ## 3D mode
22 |
23 | You can toggle 3D mode by clicking on the '2D/3D' toggle at the top of the screen.
24 | In this mode, you can rotate using the left-click, zoom using the mouse wheel, and pan using the right-click.
25 |
26 | !!! warning
27 | There is no node/relation highlighting in this mode, due to performance issues
28 |
29 | {: .center}
30 |
--------------------------------------------------------------------------------
/askomics/libaskomics/FilesUtils.py:
--------------------------------------------------------------------------------
1 | from askomics.libaskomics.Database import Database
2 | from askomics.libaskomics.Params import Params
3 |
4 |
class FilesUtils(Params):
    """Helper methods shared by FilesHandler and ResultsHandler"""

    def __init__(self, app, session):
        """init

        Parameters
        ----------
        app : Flask
            flask app
        session :
            AskOmics session, contain the user
        """
        Params.__init__(self, app, session)

    def get_size_occupied_by_user(self):
        """Get the disk size occupied by the user (uploaded files and results)

        Returns
        -------
        int
            size in bytes (0 when the user has no files nor results)
        """
        database = Database(self.app, self.session)

        # Sum the sizes of both the user's results and uploaded files
        query = '''
        SELECT SUM(size)
        FROM (
            SELECT size
            FROM results
            WHERE user_id = ?
            UNION ALL
            SELECT size
            FROM files
            WHERE user_id = ?
        )
        '''

        user_id = self.session["user"]["id"]
        rows = database.execute_sql_query(query, (user_id, user_id))

        # SUM() yields NULL (None) when there are no rows: report 0 instead
        total = rows[0][0]
        return total if total is not None else 0
46 |
--------------------------------------------------------------------------------
/askomics/react/src/routes/form_edit/entity.jsx:
--------------------------------------------------------------------------------
1 | import React, { Component} from 'react'
2 | import axios from 'axios'
3 | import { Input, FormGroup, CustomInput, FormFeedback, Label } from 'reactstrap'
4 | import { Redirect } from 'react-router-dom'
5 | import DatePicker from "react-datepicker";
6 | import ErrorDiv from '../error/error'
7 | import WaitingDiv from '../../components/waiting'
8 | import update from 'react-addons-update'
9 | import PropTypes from 'prop-types'
10 | import Utils from '../../classes/utils'
11 |
12 | export default class Entity extends Component {
  constructor (props) {
    super(props)
    // Shared helper utilities used across AskOmics components
    this.utils = new Utils()
    // Initialized empty: this component renders from props (entity, attribute_boxes)
    this.state = {}
  }
18 |
19 |
20 | render () {
21 | let entity_id = this.props.entity_id
22 | let entity = this.props.entity
23 | let attribute_boxes = this.props.attribute_boxes
24 | return(
25 |
5 | AskOmics provides a visual representation of the user abstraction as a graph.
6 | By starting from a node of interest and iteratively selecting its neighbors,
7 | the user creates a path on an abstraction graph. This path can then be transformed
8 | into a SPARQL query that can be executed on the original dataset.
9 |
10 |
11 |
12 | Visit askomics.org to learn how to use and deploy AskOmics.
13 |
51 |
52 |
53 | )
54 | }
55 | }
56 |
57 | GraphFilters.propTypes = {
58 | graph: PropTypes.object,
59 | current: PropTypes.object,
60 | showFaldo: PropTypes.bool,
61 | handleFilterLinks: PropTypes.func,
62 | handleFilterNodes: PropTypes.func,
63 | handleFilterFaldo: PropTypes.func
64 | }
65 |
--------------------------------------------------------------------------------
/tests/test_uri.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from . import AskomicsTestCase
4 |
5 |
class TestURIResults(AskomicsTestCase):
    """Test correct URI interpretation"""

    @staticmethod
    def _load_json(path):
        # Small helper: read and deserialize a JSON fixture file
        with open(path) as file:
            return json.load(file)

    def test_uri(self, client):
        """Test entity uri interpretation"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_file("test-data/uris.csv")

        client.integrate_file({
            "id": 1,
            "columns_type": ["start_entity", "text"]
        })

        json_query = self._load_json("tests/data/uri_query.json")
        expected = self._load_json("tests/results/results_uri.json")

        response = client.client.post('/api/query/preview', json=json_query)

        assert response.status_code == 200
        assert self.equal_objects(response.json, expected)

    def test_linked_uri(self, client):
        """Test linked uri interpretation"""
        client.create_two_users()
        client.log_user("jdoe")
        client.upload_file("test-data/uris.csv")
        client.upload_file("test-data/linked_uris.csv")

        client.integrate_file({
            "id": 1,
            "columns_type": ["start_entity", "text"]
        })

        client.integrate_file({
            "id": 2,
            "columns_type": ["start_entity", "general_relation"]
        })

        json_query = self._load_json("tests/data/linked_uri_query.json")
        expected = self._load_json("tests/results/results_linked_uri.json")

        response = client.client.post('/api/query/preview', json=json_query)
        print(response.json)

        assert response.status_code == 200
        assert self.equal_objects(response.json, expected)
67 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | AskOmics is a visual SPARQL query interface supporting both intuitive data integration and querying while shielding the user from most of the technical difficulties underlying RDF and SPARQL.
2 |
3 | AskOmics helps loading heterogeneous data (formatted as tabular files, GFF, BED or native RDF files) into an RDF triplestore, to be transparently and interactively queried through a user-friendly interface.
4 |
5 | AskOmics also support federated queries to external SPARQL endpoints.
6 |
7 | {: .center}
8 |
9 | - AskOmics Tutorials
10 | - [Overview tutorial](tutorial.md): How to use AskOmics with example data
11 | - [Prepare your data](data.md): How to format your own data for AskOmics
12 | - [Building a query](query.md): Learn how the query building interface works
13 | - [Results interface](results.md): In-depth guide of the results interface
14 | - [Template & forms](template.md): An overview of the templates & forms functionalities
15 | - [SPARQL console](console.md): How to interact with the provided SPARQL console
16 | - [Command-line interface](cli.md): A python-based CLI for interacting with AskOmics
17 | - [Build an RDF abstraction](abstraction.md): Learn how to build an RDF abstraction for RDF data
18 | - [Perform federated queries](federation.md): How to query your own data with external resources
19 | - [Use AskOmics with Galaxy](galaxy.md): How to connect AskOmics with your Galaxy history
20 | - [Link your data to ontologies](ontologies.md): How to add ontologies to AskOmics, and connect your own data
21 |
22 |
23 | - Administration
24 | - [Deploy an instance](production-deployment.md): Deploy an AskOmics instance on your server
25 | - [Configuration](configure.md): Configure your instance
26 | - [Manage](manage.md): Manage your instance
27 | - [Add custom prefixes](prefixes.md): How to add custom prefixes for your users
28 |
29 |
30 | - Developer documentation
31 | - [Deploy a development instance locally](dev-deployment.md)
32 | - [Contribute to AskOmics](contribute.md)
33 | - [CI](ci.md): Test your code with continuous integration
34 | - [Contribute to doc](docs.md): Write documentation
35 |
--------------------------------------------------------------------------------
/tests/results/sparql_query.json:
--------------------------------------------------------------------------------
1 | {
2 | "diskSpace": ###SIZE###,
3 | "endpoints": {
4 | "###LOCAL_ENDPOINT###": {
5 | "name": "local triplestore",
6 | "selected": true,
7 | "uri": "###LOCAL_ENDPOINT###"
8 | }
9 | },
10 | "console_enabled": true,
11 | "error": false,
12 | "errorMessage": "",
13 | "graphs": {
14 | "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###": {
15 | "name": "de.tsv",
16 | "selected": false,
17 | "uri": "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###"
18 | },
19 | "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###": {
20 | "name": "qtl.tsv",
21 | "selected": false,
22 | "uri": "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###"
23 | },
24 | "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###": {
25 | "name": "transcripts.tsv",
26 | "selected": true,
27 | "uri": "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###"
28 | },
29 | "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###": {
30 | "name": "gene.bed",
31 | "selected": false,
32 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###"
33 | },
34 | "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###": {
35 | "name": "gene.gff3",
36 | "selected": true,
37 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
38 | }
39 | },
40 | "query": "PREFIX : \nPREFIX askomics: \nPREFIX dc: \nPREFIX dcat: \nPREFIX faldo: \nPREFIX owl: \nPREFIX prov: \nPREFIX rdf: \nPREFIX rdfs: \nPREFIX skos: \nPREFIX xsd: \n\nSELECT DISTINCT ?transcript1_Label\nWHERE {\n ?transcript1_uri rdf:type .\n ?transcript1_uri rdfs:label ?transcript1_Label .\n\n\n\n}\n"
41 | }
42 |
--------------------------------------------------------------------------------
/test-data/transcripts.tsv:
--------------------------------------------------------------------------------
1 | transcript label taxon featureName chromosomeName start end featureType strand biotype description date
2 | AT3G10490 label_AT3G10490 Arabidopsis_thaliana ANAC052 At3 3267835 3270883 gene plus protein_coding NAC_domain_containing_protein_52_[Source:TAIR%3BAcc:AT3G10490] 01/01/2000
3 | AT3G13660 label_AT3G13660 Arabidopsis_thaliana DIR22 At3 4464908 4465586 gene plus protein_coding Dirigent_protein_22_[Source:UniProtKB/Swiss-Prot%3BAcc:Q66GI2] 02/01/2000
4 | AT3G51470 label_AT3G51470 Arabidopsis_thaliana na At3 19097787 19099275 gene minus protein_coding Probable_protein_phosphatase_2C_47_[Source:UniProtKB/Swiss-Prot%3BAcc:Q9SD02] 03/01/2000
5 | AT3G10460 label_AT3G10460 Arabidopsis_thaliana na At3 3255800 3256439 gene plus protein_coding Plant_self-incompatibility_protein_S1_family_[Source:TAIR%3BAcc:AT3G10460] 04/01/2000
6 | AT3G22640 label_AT3G22640 Arabidopsis_thaliana PAP85 At3 8011724 8013902 gene minus protein_coding cupin_family_protein_[Source:TAIR%3BAcc:AT3G22640] 05/01/2000
7 | AT1G33615 label_AT1G33615 Arabidopsis_thaliana na At1 12193325 12194374 ncRNA_gene minus ncRNA other_RNA_[Source:TAIR%3BAcc:AT1G33615] 06/01/2000
8 | AT5G41905 label_AT5G41905 Arabidopsis_thaliana MIR166E At5 16775524 16775658 miRNA_gene minus miRNA MIR166/MIR166E%3B_miRNA_[Source:TAIR%3BAcc:AT5G41905] 07/01/2000
9 | AT1G57800 label_AT1G57800 Arabidopsis_thaliana ORTH3 At1 21408623 21412283 gene minus protein_coding E3_ubiquitin-protein_ligase_ORTHRUS_3_[Source:UniProtKB/Swiss-Prot%3BAcc:Q9FVS2] 08/01/2000
10 | AT1G49500 label_AT1G49500 Arabidopsis_thaliana na At1 18321295 18322284 gene minus protein_coding unknown_protein%3B_FUNCTIONS_IN:_molecular_function_unknown%3B_INVOLVED_IN:_biological_process_unknown%3B_LOCATED_IN:_endomembrane_system%3B_EXPRESSED_IN:_19_plant_structures%3B_EXPRESSED_DURING:_10_growth_stages%3B_BEST_Arabidopsis_thaliana_protein_match_is:_u_/.../_protein_(TAIR:AT3G19030.1)%3B_Has_24_Blast_hits_to_24_proteins_in_2_species:_Archae_-_0%3B_Bacteria_-_0%3B_Metazoa_-_0%3B_Fungi_-_0%3B_Plants_-_24%3B_Viruses_-_0%3B_Other_Eukaryotes_-_0_(source:_NCBI_BLink)._[Source:TAIR%3BAcc:AT1G49500] 09/01/2000
11 | AT5G35334 label_AT5G35334 Arabidopsis_thaliana na At5 13537917 13538984 gene transposable_element transposable_element_gene_[Source:TAIR%3BAcc:AT5G35334] 10/01/2000
12 |
--------------------------------------------------------------------------------
/tests/data/uri_query.json:
--------------------------------------------------------------------------------
1 | {
2 | "graphState": {
3 | "attr": [
4 | {
5 | "entityLabel": "test_uri",
6 | "entityUris": [
7 | "http://askomics.org/test/data/test_uri"
8 | ],
9 | "faldo": false,
10 | "filterType": "exact",
11 | "filterValue": "",
12 | "humanNodeId": 1,
13 | "id": 2,
14 | "label": "Uri",
15 | "linked": false,
16 | "linkedWith": null,
17 | "negative": false,
18 | "nodeId": 1,
19 | "optional": false,
20 | "type": "uri",
21 | "uri": "rdf:type",
22 | "visible": true
23 | },
24 | {
25 | "entityLabel": "test_uri",
26 | "entityUris": [
27 | "http://askomics.org/test/data/test_uri"
28 | ],
29 | "faldo": false,
30 | "filterType": "exact",
31 | "filterValue": "",
32 | "humanNodeId": 1,
33 | "id": 3,
34 | "label": "Label",
35 | "linked": false,
36 | "linkedWith": null,
37 | "negative": false,
38 | "nodeId": 1,
39 | "optional": false,
40 | "type": "text",
41 | "uri": "rdfs:label",
42 | "visible": true
43 | },
44 | {
45 | "entityLabel": "test_uri",
46 | "entityUris": [
47 | "http://askomics.org/test/data/test_uri"
48 | ],
49 | "faldo": null,
50 | "filterType": "exact",
51 | "filterValue": "",
52 | "humanNodeId": 1,
53 | "id": 4,
54 | "label": "mydata",
55 | "linked": false,
56 | "linkedWith": null,
57 | "negative": false,
58 | "nodeId": 1,
59 | "optional": false,
60 | "type": "text",
61 | "uri": "http://askomics.org/test/data/mydata",
62 | "visible": false
63 | }
64 | ],
65 | "links": [],
66 | "nodes": [
67 | {
68 | "__indexColor": "#ec0001",
69 | "faldo": false,
70 | "filterLink": "",
71 | "filterNode": "",
72 | "graphs": [
73 | "urn:sparql:askomics:1_jdoe:uris.csv_1623050448"
74 | ],
75 | "humanId": 1,
76 | "id": 1,
77 | "index": 0,
78 | "label": "test_uri",
79 | "selected": true,
80 | "specialNodeGroupId": null,
81 | "specialNodeId": null,
82 | "specialPreviousIds": [
83 | null,
84 | null
85 | ],
86 | "suggested": false,
87 | "type": "node",
88 | "uri": "http://askomics.org/test/data/test_uri",
89 | "vx": 0,
90 | "vy": 0,
91 | "x": 0,
92 | "y": 0
93 | }
94 | ]
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
/docs/console.md:
--------------------------------------------------------------------------------
1 | A SPARQL console is available through AskOmics, allowing you to send direct SPARQL queries to the endpoint.
2 |
3 | !!! warning
4 | The console access is restricted to **logged users**
5 |
6 | !!! warning
7 | The default AskOmics configuration restricts SPARQL editing and querying to administrators.
8 | This can be disabled with the *enable_sparql_console* configuration option.
9 |
10 | {: .center}
11 |
12 | You can reach this console in two ways:
13 |
14 | # Console access
15 |
16 | - By clicking SPARQL of an existing result in the *Results* page
17 | - The console will be pre-filled with the generated SPARQL query of the result
18 | - Simply heading to the "/sparql" URL
19 | - The console will be pre-filled with a default SPARQL query
20 |
21 | # Editing your query
22 |
23 | You can edit the SPARQL query through the console to customize your query.
24 |
25 | ## Advanced options
26 |
27 | The **Advanced options** tab allows you to customize *how* the query will be sent.
28 | Namely, you will be able to select which endpoints and datasets the query will use, allowing you to fine-tune the query
29 |
30 | - For example, you can exclude some datasets to restrict the results.
31 |
32 | !!! note "Info"
33 | When accessing the console through the "Results" page, the datasets of interest (relevant to the query) will already be selected. Make sure to customize the selection if you modify the query.
34 |
35 | !!! note "Info"
36 | When accessing the console directly, all datasets will be selected (which can increase query time)
37 |
38 | # Launching query
39 |
40 | If you have **editing privileges** (either as an administrator, or through the configuration key), you will be able to either preview or save the query, much like a "normal" query.
41 |
42 | If you save the query, it will appear as a normal result in the "Results" tab. The basic functionalities (templates, download) will be available.
43 |
44 | !!! warning
45 | The Redo button will be disabled for results created from the console
46 |
47 | !!! warning
48 | The generated *template* will redirect to the SPARQL console. This means:
49 |
50 | - Non-logged users will not be able to use it
51 | - Only logged users with **editing privileges** will be able to launch the query
52 |
--------------------------------------------------------------------------------
/askomics/react/src/routes/sparql/advancedsparql.jsx:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 | import axios from 'axios'
3 | import BootstrapTable from 'react-bootstrap-table-next'
4 | import { Collapse, CustomInput, Input, FormGroup, Label, Row, Col, Form, ButtonGroup, Button } from 'reactstrap'
5 | import update from 'react-addons-update'
6 | import PropTypes from 'prop-types'
7 |
8 | export default class AdvancedSparql extends Component {
  constructor (props) {
    super(props)
    this.state = {
      // Whether the "Advanced options" panel is expanded
      isAdvancedOpen: false
    }
    // Bind so the toggle keeps `this` when used as an event handler
    this.toogleAdvanced = this.toogleAdvanced.bind(this)
  }
16 |
17 |
18 | toogleAdvanced () {
19 | this.setState(state => ({ isAdvancedOpen: !state.isAdvancedOpen }))
20 | }
21 |
22 | render () {
23 |
24 | return (
25 |
93 | )
94 | }
95 | }
96 |
// NOTE(review): this file exports `AdvancedSparql`, yet propTypes are attached
// to `Datasets` — this looks like a copy-paste slip, but lines 26-92 of the
// file are not visible here; confirm against the full file before renaming.
Datasets.propTypes = {
  config: PropTypes.object,
  waitForStart: PropTypes.bool
}
101 |
--------------------------------------------------------------------------------
/docs/results.md:
--------------------------------------------------------------------------------
1 | On the Results page, you will be able to see all your saved results (after using the Run & save button). Each row stores both the query and its results.
2 |
3 | # General information
4 |
5 | Result information can be found for each row :
6 |
7 | - Creation date: The creation time of this result
8 | - Exec time: The running time of the linked query
9 | - Status: Current status of the query
10 | - Possible values are 'Success', 'Queued', 'Started', 'Failure' and 'Deleting'
11 | - Rows: Number of rows in the result
12 | - Size: Size of the result file
13 |
14 | ## Description
15 |
16 | Each description can be customized by clicking on the field, and entering the desired value. You can use this to identify the query related to this result.
17 |
18 | !!! Warning
19 | Don't forget to save your new description using **Enter**
20 |
21 | !!! note "Info"
22 | The description will be displayed on the main page if you transform this query in a [template or form](template.md).
23 |
24 | # Templates and forms
25 |
26 | You can use the available toggle buttons if you wish to create a [template or form](template.md).
27 |
28 | !!! Warning
29 | Form creation is restricted to administrators. The related query must also be a [form-able query](template.md#Forms).
30 |
31 | # Publication
32 |
33 | The 'Public' toggle is available if you are an administrator. It will automatically create a public form (if the result is form-able), or a template. They will be accessible to **all users** from the main page.
34 |
35 | !!! Tip
36 | Make sure to set a custom description (and [customize your form](template.md#editing-the-form-display), if relevant) to help users understand your template/form.
37 |
38 | # Actions
39 |
40 | Several actions are also available for each result :
41 |
42 | ## Preview
43 |
44 | Preview directly re-launch the related query, and print a preview of the results.
45 | The preview will be shown under the results table.
46 |
47 | ## Download
48 |
49 | Clicking on Download will let you download a CSV file containing the results.
50 |
51 | ## Form
52 |
53 | Clicking on Form will let you customize the related form display.
54 |
55 | !!! Warning
56 | Only available for administrators and form-able results.
57 |
58 | ## Redo
59 |
60 | Clicking on Redo will let you directly replay the query from the query interface. It will be in the exact same state as when you clicked on Run & save.
61 |
62 | !!! Warning
63 | Only available for results generated from the query interface.
64 |
65 | ## Sparql
66 |
67 | Clicking on Sparql will redirect you to the [SPARQL console](console.md). You will be able to browse the SPARQL code generated by your query.
68 |
69 | !!! note "Info"
70 | Depending on your AskOmics configuration, you might be able to directly customize the query and launch it from the console.
71 |
72 | {: .center}
73 |
74 | # Deletion
75 |
76 | To delete one or more results, simply select them in the table, and use the "Delete" button at the bottom of the table.
77 |
78 | !!! Warning
79 | This will delete any template or form generated from the result.
80 |
--------------------------------------------------------------------------------
/askomics/react/src/routes/upload/uploadurlform.jsx:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 | import { Form, FormGroup, FormText, Label, Input, Button, CustomInput, Progress } from 'reactstrap'
3 | import axios from 'axios'
4 | import update from 'react-addons-update'
5 | import PropTypes from 'prop-types'
6 | import Utils from '../../classes/utils'
7 | import ErrorDiv from '../error/error'
8 |
9 | export default class UploadUrlForm extends Component {
10 | constructor (props) {
11 | super(props)
12 | this.utils = new Utils()
13 |
14 | this.state = {
15 | url: '',
16 | disabled: true,
17 | progressAnimated: true,
18 | progressValue: 0,
19 | progressDisplay: "",
20 | progressColor: "success",
21 | error: false,
22 | errorMessage: null,
23 | status: null
24 | }
25 |
26 | this.handleChange = this.handleChange.bind(this)
27 | this.handleSubmit = this.handleSubmit.bind(this)
28 | this.cancelRequest
29 | }
30 |
31 | handleChange (event) {
32 | this.setState({
33 | url: event.target.value,
34 | disabled: !this.utils.isUrl(event.target.value)
35 | })
36 | }
37 |
38 | handleSubmit (event) {
39 |
40 | let requestUrl = '/api/files/upload_url'
41 | let data = {
42 | url: this.state.url
43 | }
44 |
45 | this.setState({
46 | disabled: true,
47 | progressAnimated: true,
48 | progressValue: 0,
49 | progressDisplay: "0 %",
50 | progressColor: "success"
51 | })
52 |
53 | axios.post(requestUrl, data, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) })
54 | .then(response => {
55 | console.log(requestUrl, response.data)
56 | this.setState({
57 | disabled: false,
58 | progressAnimated: false,
59 | progressValue: 100,
60 | progressDisplay: "100 %",
61 | progressColor: "success"
62 | })
63 | this.props.getFiles()
64 | })
65 | .catch(error => {
66 | console.log(error, error.response.data.errorMessage)
67 | this.setState({
68 | disabled: false,
69 | progressAnimated: false,
70 | progressValue: 100,
71 | progressDisplay: "ERROR",
72 | progressColor: "error",
73 | error: true,
74 | errorMessage: error.response.data.errorMessage,
75 | status: error.response.status,
76 | })
77 | })
78 |
79 | }
80 |
81 | render () {
82 | return (
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 | )
92 | }
93 | }
94 |
95 | UploadUrlForm.propTypes = {
96 | setStateUpload: PropTypes.func,
97 | getFiles: PropTypes.func,
98 | config: PropTypes.object
99 | }
100 |
--------------------------------------------------------------------------------
/tests/results/startpoints.json:
--------------------------------------------------------------------------------
1 | {
2 | "error":
3 | false,
4 | "errorMessage": "",
5 | "publicQueries": [],
6 | "publicFormQueries": [],
7 | "startpoints":
8 | [{
9 | "endpoints": [{
10 | "name": "local",
11 | "url": "http://localhost:8891/sparql"
12 | }, {
13 | "name": "local",
14 | "url": "http://localhost:8891/sparql"
15 | }],
16 | "entity":
17 | "http://askomics.org/test/data/transcript",
18 | "entity_label":
19 | "transcript",
20 | "graphs":
21 | [{
22 | "creator": "jdoe",
23 | "public": "false",
24 | "uri": "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
25 | },
26 | {
27 | "creator": "jdoe",
28 | "public": "false",
29 | "uri":
30 | "urn:sparql:askomics_test:1_jdoe:transcripts.tsv_###TRANSCRIPTS_TIMESTAMP###"
31 | }],
32 | "private":
33 | true,
34 | "public":
35 | false
36 | },
37 | {
38 | "endpoints": [{
39 | "name": "local",
40 | "url": "http://localhost:8891/sparql"
41 | }, {
42 | "name": "local",
43 | "url": "http://localhost:8891/sparql"
44 | }],
45 | "entity":
46 | "http://askomics.org/test/data/gene",
47 | "entity_label":
48 | "gene",
49 | "graphs": [{
50 | "creator":
51 | "jdoe",
52 | "public":
53 | "false",
54 | "uri":
55 | "urn:sparql:askomics_test:1_jdoe:gene.bed_###BED_TIMESTAMP###"
56 | },
57 | {
58 | "creator":
59 | "jdoe",
60 | "public":
61 | "false",
62 | "uri":
63 | "urn:sparql:askomics_test:1_jdoe:gene.gff3_###GFF_TIMESTAMP###"
64 | }],
65 | "private":
66 | true,
67 | "public":
68 | false
69 | },
70 | {
71 | "endpoints": [{
72 | "name": "local",
73 | "url": "http://localhost:8891/sparql"
74 | }],
75 | "entity":
76 | "http://askomics.org/test/data/DifferentialExpression",
77 | "entity_label":
78 | "DifferentialExpression",
79 | "graphs": [{
80 | "creator": "jdoe",
81 | "public": "false",
82 | "uri": "urn:sparql:askomics_test:1_jdoe:de.tsv_###DE_TIMESTAMP###"
83 | }],
84 | "private":
85 | true,
86 | "public":
87 | false
88 | },
89 | {
90 | "endpoints": [{
91 | "name": "local",
92 | "url": "http://localhost:8891/sparql"
93 | }],
94 | "entity":
95 | "http://askomics.org/test/data/QTL",
96 | "entity_label":
97 | "QTL",
98 | "graphs": [{
99 | "creator": "jdoe",
100 | "public": "false",
101 | "uri": "urn:sparql:askomics_test:1_jdoe:qtl.tsv_###QTL_TIMESTAMP###"
102 | }],
103 | "private":
104 | true,
105 | "public":
106 | false
107 | }]
108 | }
109 |
--------------------------------------------------------------------------------
/askomics/libaskomics/RdfGraph.py:
--------------------------------------------------------------------------------
1 | from askomics.libaskomics.Params import Params
2 |
3 | import rdflib
4 | from rdflib.namespace import Namespace
5 |
6 |
class RdfGraph(Params):
    """rdflib.Graph wrapper

    Attributes
    ----------
    namespace_internal : Namespace
        AskOmics internal namespace
    namespace_data : Namespace
        AskOmics data namespace (default prefix)
    graph : Graph
        underlying rdflib graph
    ntriple : int
        Number of triples added/removed through this wrapper
        (not updated by parse())
    """

    def __init__(self, app, session):
        """init

        Parameters
        ----------
        app : Flask
            Flask app
        session
            AskOmics session
        """
        Params.__init__(self, app, session)

        self.namespace_data = Namespace(self.settings.get('triplestore', 'namespace_data'))
        self.namespace_internal = Namespace(self.settings.get('triplestore', 'namespace_internal'))

        self.graph = rdflib.Graph()
        # Register the well-known prefixes used in AskOmics RDF output
        for prefix, namespace in (
            ('', self.namespace_data),
            ('askomics', self.namespace_internal),
            ('faldo', "http://biohackathon.org/resource/faldo/"),
            ('dc', 'http://purl.org/dc/elements/1.1/'),
            ('prov', 'http://www.w3.org/ns/prov#'),
            ('dcat', 'http://www.w3.org/ns/dcat#'),
        ):
            self.graph.bind(prefix, namespace)
        self.ntriple = 0
        self.percent = None

    def parse(self, source=None, publicID=None, format=None, location=None, file=None, data=None, **args):
        """Parse RDF content into the wrapped graph (does not update ntriple)"""
        self.graph.parse(source=source, publicID=publicID, format=format, location=location, file=file, data=data, **args)

    def add(self, triple):
        """Add a triple into the rdf graph

        Parameters
        ----------
        triple : tuple
            triple to add
        """
        self.graph.add(triple)
        self.ntriple += 1

    def remove(self, triple):
        """Remove a triple from the rdf graph

        Parameters
        ----------
        triple : tuple
            triple to remove
        """
        self.graph.remove(triple)
        self.ntriple -= 1

    def bind(self, a, b):
        """Bind a namespace

        Parameters
        ----------
        a : string
            prefix
        b : string
            namespace
        """
        self.graph.bind(a, b)

    def get_triple(self):
        """Yield every (subject, predicate, object) triple of the graph"""
        yield from self.graph

    def serialize(self, destination=None, format='xml', base=None, encoding=None, **args):
        """Serialize the graph into a file

        Parameters
        ----------
        format : string
            rdf syntax
        encoding : string
            Encoding
        destination : string
            File destination

        Returns
        -------
        The serialization when no destination is given, None otherwise
        """
        output = self.graph.serialize(destination=destination, format=format, base=base, encoding=encoding, **args)

        # Only hand back the serialization when nothing was written to disk
        return output if destination is None else None
106 |
--------------------------------------------------------------------------------
/docs/ontologies.md:
--------------------------------------------------------------------------------
1 | Starting from the 4.4 release, hierarchical ontologies (such as the NCBITAXON ontology) can be integrated in AskOmics.
2 | This will allow users to query on an entity, or on its ancestors and descendants
3 |
4 | # Registering an ontology (admin-only)
5 |
6 | !!! warning
7 | While not required for basic queries (and subClassOf queries), registering an ontology is required for enabling auto-completion, using non-default labels (ie: *skos:prefLabel*), and enabling an integration shortcut for users.
8 |
9 |
10 | First, make sure to have the [abstraction file](/abstraction/#ontologies) ready. Upload it to AskOmics, and integrate it.
11 | Make sure *to set it public*.
12 |
13 | You can then head to Ontologies in the user tab. There, you will be able to create and delete ontologies.
14 |
15 | ## Creating an ontology
16 |
17 | Parameters to create an ontology are as follows:
18 |
19 | * Ontology name: the full name of the ontology: will be displayed as a column type when integrating CSV files.
20 | * Ontology short name: the shortname of the ontology (ex: NCBITAXON). /!\ When using ols autocompleting, this needs to match an existing ols ontology
21 | * Ontology uri: The ontology uri in your abstraction file
22 | * Linked public dataset: The *public* dataset containing your classes (not necessarily your abstraction)
23 | * Label uri: The label predicate your classes are using. Defaults to rdfs:label
24 | * Autocomplete type: If local, autocomplete will work with a SPARQL query (local or federated). If OLS, it will be sent on the OLS endpoint.
25 |
26 | # Linking your data to an ontology
27 |
28 | This functionality will only work with CSV files. You will need to fill out a column with the terms uris.
29 | If the ontology has been registered, you can directly select the ontology's column type.
30 |
31 | {: .center}
32 |
33 | Else, you will need to set the header as you would for a relation, using the ontology uri as the remote entity.
34 |
35 | Ex: `is organism@http://purl.bioontology.org/ontology/NCBITAXON`
36 |
37 | # Querying data using ontological terms
38 |
39 | If your entity is linked to an ontology, the ontology will appear as a related entity on the graph view.
40 | From there, you will be able to directly print the linked term's attributes (label, or other)
41 |
42 | {: .center}
43 |
44 | If the ontology was registered (and an autocompletion type was selected), the label field will have autocompletion (starting after 3 characters).
45 |
46 | {: .center}
47 |
48 | ## Querying on hierarchical relations
49 |
50 | You can also query on a related term, to build queries such as :
51 |
52 | * Give me all entities related to the children of this term
53 | * Give me all entities related any ancestor of this term
54 |
55 | To do so, simply click on the linked ontology circle, fill out the required label (or other attribute), and click on the link between both ontologies to select the type of query (either *children of*, *descendants of*, *parents of*, *ancestors of*)
56 |
57 | {: .center}
58 |
59 | !!! warning
60 | The relation goes from the second ontology circle to the first. Thus, to get the *children of* a specific term, you will need to select the *children of* relation, and select the label on the **second** circle
61 |
--------------------------------------------------------------------------------
/.github/workflows/lint_test.yml:
--------------------------------------------------------------------------------
1 | name: Lint and test
2 | on: ["push", "pull_request"]
3 | jobs:
4 | lint:
5 | runs-on: ubuntu-latest
6 | steps:
7 | - name: Checkout
8 | uses: actions/checkout@v2
9 | - uses: actions/setup-python@v1
10 | with:
11 | python-version: 3.11
12 | - name: Install flake8
13 | run: pip install flake8
14 | - name: Flake8
15 | run: flake8 askomics tests cli --ignore=E501,W504
16 |
17 | node_test:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: actions/checkout@v2
21 | - uses: actions/setup-node@v2-beta
22 | with:
23 | node-version: '10'
24 | - name: Install modules
25 | run: make install-js MODE=dev
26 | - name: Run ESLint
27 | run: make eslint MODE=dev TRAVIS=true
28 |
29 | py_test:
30 | runs-on: ubuntu-latest
31 | steps:
32 | - name: Checkout
33 | uses: actions/checkout@v2
34 | - uses: actions/setup-python@v1
35 | with:
36 | python-version: 3.11
37 | - name: Update apt cache
38 | run: sudo apt-get update
39 | - name: Install python-ldap deps
40 | run: sudo apt-get install libldap2-dev libsasl2-dev
41 | - name: Install python dev deps
42 | run: pip install ephemeris coveralls
43 | - name: Install deps
44 | run: make install-python MODE=dev
45 | - name: Launch containers
46 | run: |
47 | docker pull redis:4.0
48 | docker pull askomics/virtuoso:7.2.5.1
49 | docker pull bgruening/galaxy-stable:20.05
50 | docker pull xgaia/corese:latest
51 | docker pull xgaia/isql-api:2.1.1
52 | docker pull xgaia/simple-ldap:latest
53 | docker run -d --name virtuoso -p 8891:8890 -p 1112:1111 -e DBA_PASSWORD=dba -e DEFAULT_GRAPH=http://localhost:8891/DAV -t askomics/virtuoso:7.2.5.1 /bin/sh -c "netstat -nr | grep '^0\.0\.0\.0' | grep -oE '((1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])\.){3}(1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])' | grep -v '^0\.0\.0\.0' | sed 's/$/ askomics-host/' >> /etc/hosts && /virtuoso/virtuoso.sh"
54 | sleep 1m
55 | docker run -d --name redis -p 6380:6379 -t redis:4.0
56 | docker run -d --name galaxy -p 8081:80 -t bgruening/galaxy-stable:20.05
57 | docker run -d --name corese -p 8082:8080 -t xgaia/corese:latest /bin/sh -c "netstat -nr | grep '^0\.0\.0\.0' | grep -oE '((1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])\.){3}(1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])' | grep -v '^0\.0\.0\.0' | sed 's/$/ askomics-host/' >> /etc/hosts && /corese/start.sh"
58 | docker run -d --name isql-api -p 5051:5050 -e VIRTUOSO_HOST=askomics-host -e VIRTUOSO_ISQL_PORT=1112 -t xgaia/isql-api:2.1.1 /bin/sh -c "netstat -nr | grep '^0\.0\.0\.0' | grep -oE '((1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])\.){3}(1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])' | grep -v '^0\.0\.0\.0' | sed 's/$/ askomics-host/' >> /etc/hosts && sh /isqlapi/docker-run.sh"
59 | docker run -d --name ldap -p 8389:389 -e ORGANISATION_NAME=AskOmics -e SUFFIX='dc=askomics,dc=org' -e ROOT_USER=admin -e ROOT_PW_CLEAR=admin -e FIRST_USER=true -e USER_UID=jwick -e USER_GIVEN_NAME=John -e USER_SURNAME=Wick -e USER_EMAIL=john.wick@askomics.org -e USER_PW_CLEAR=jwick -t xgaia/simple-ldap:latest
60 | galaxy-wait -g http://localhost:8081 --timeout 900
61 | echo "Galaxy is online, waiting a bit more for admin user creation"
62 | sleep 1m
63 | - name: Run tests
64 | run: |
65 | make pytest MODE=dev TRAVIS=true
66 | - name: Coveralls
67 | env:
68 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
69 | run: |
70 | coveralls
71 |
--------------------------------------------------------------------------------
/askomics/react/src/routes/error/error.jsx:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 | import { Alert } from 'reactstrap'
3 | import { Redirect } from 'react-router'
4 | import PropTypes from 'prop-types'
5 | import Utils from '../../classes/utils'
6 |
7 | export default class ErrorDiv extends Component {
8 | constructor (props) {
9 | super(props)
10 | this.utils = new Utils()
11 | }
12 |
13 | render () {
14 | if (this.props.status == 401) {
15 | return
16 | }
17 |
18 | if (!this.props.error) {
19 | return null
20 | }
21 |
22 | let messages = {
23 | "404": this.utils.objectHaveKeys(this.props, "customMessages", "404") ? this.props.customMessages["404"] : "404 Not Found",
24 |
25 | "501": this.utils.objectHaveKeys(this.props, "customMessages", "501") ? this.props.customMessages["501"] : "501 Not Implemented",
26 | "502": this.utils.objectHaveKeys(this.props, "customMessages", "502") ? this.props.customMessages["502"] : "502 Bad Gateway",
27 | "503": this.utils.objectHaveKeys(this.props, "customMessages", "503") ? this.props.customMessages["503"] : "503 Service Unavailable",
28 | "504": this.utils.objectHaveKeys(this.props, "customMessages", "504") ? this.props.customMessages["504"] : "504 Gateway Time-out",
29 | "505": this.utils.objectHaveKeys(this.props, "customMessages", "505") ? this.props.customMessages["505"] : "505 HTTP Version not supported",
30 | "506": this.utils.objectHaveKeys(this.props, "customMessages", "506") ? this.props.customMessages["506"] : "506 Variant Also Negotiates",
31 | "507": this.utils.objectHaveKeys(this.props, "customMessages", "507") ? this.props.customMessages["507"] : "507 Insufficient storage",
32 | "508": this.utils.objectHaveKeys(this.props, "customMessages", "508") ? this.props.customMessages["508"] : "508 Loop detected",
33 | "509": this.utils.objectHaveKeys(this.props, "customMessages", "509") ? this.props.customMessages["509"] : "509 Bandwidth Limit Exceeded",
34 | "510": this.utils.objectHaveKeys(this.props, "customMessages", "510") ? this.props.customMessages["510"] : "510 Not extended",
35 | "511": this.utils.objectHaveKeys(this.props, "customMessages", "511") ? this.props.customMessages["511"] : "511 Network authentication required",
36 |
37 | "500": this.utils.objectHaveKeys(this.props, "customMessages", "500") ? this.props.customMessages["500"] : this.props.errorMessage ? this.props.errorMessage : "500 Internal Server Error",
38 | "200": this.props.errorMessage
39 | }
40 |
41 | let error
42 |
43 | if (Array.isArray(this.props.errorMessage)) {
44 | error = (
45 |
46 | {this.props.errorMessage.map((item, index) => (
47 |
108 | )
109 | }
110 | }
111 |
112 | Data.propTypes = {
113 | waitForStart: PropTypes.bool,
114 | config: PropTypes.object,
115 | match: PropTypes.object
116 | }
117 |
118 | export default withRouter(Data)
119 |
--------------------------------------------------------------------------------
/docs/galaxy.md:
--------------------------------------------------------------------------------
1 | Galaxy is a scientific workflow, data integration, and data and analysis persistence and publishing platform that aims to make computational biology accessible to research scientists that do not have computer programming or systems administration experience.
2 |
3 | A Galaxy Training tutorial is available [here](https://training.galaxyproject.org/training-material/topics/transcriptomics/tutorials/rna-seq-analysis-with-askomics-it/tutorial.html)
4 |
5 | AskOmics can be used with a Galaxy instance in two way:
6 |
7 | - With a dedicated AskOmics, import Galaxy datasets into AskOmics and export AskOmics results into Galaxy.
8 | - In Galaxy: use AskOmics Interactive Tool inside Galaxy
9 |
10 | # Link AskOmics with Galaxy
11 |
12 | ## Create a Galaxy API key
13 |
14 | On your Galaxy account, go to the top menu *User* → *API Keys* and copy your API key. This API key is a unique identifier that will be used by AskOmics to access your data.
15 |
16 |
17 |
18 | ## Enter Galaxy API key into your AskOmics account
19 |
20 | On AskOmics, go to Your Name → Account management → **Connect a Galaxy account** and enter the Galaxy URL and API Key.
21 |
22 | {: .center}
23 |
24 | Once a Galaxy account is added to AskOmics, you can access to all your Galaxy Datasets from AskOmics.
25 |
26 | ## Upload a file from Galaxy
27 |
28 | On the Files page, the Galaxy button can be used to browse Galaxy history and import a dataset.
29 |
30 | ## Send result and query to Galaxy
31 |
32 | On the Results page, the **Actions** column of the table has 2 Galaxy buttons.
33 |
34 | - Send result to Galaxy: Send the result file to the last recently used history
35 | - Send query to Galaxy: send the json graph state that represent the AskOmics query
36 |
37 | ## Import a saved query from Galaxy
38 |
39 | On the Ask! page, the Import Query can be used to import a saved query from Galaxy.
40 |
41 |
42 |
43 |
44 | # Galaxy AskOmics Interactive Tool
45 |
46 |
47 | Galaxy Interactive Tools (GxITs) are a method to run containerized tools that are interactive in nature within the Galaxy interface. AskOmics has its GxIT available on several instances:
48 |
49 | - [usegalaxy.eu](https://usegalaxy.eu)
50 | - [galaxy.genouest.org](https://galaxy.genouest.org)
51 |
52 | ## Launch AskOmics IT
53 |
54 | Search for the AskOmics Interactive tool using the search bar.
55 |
56 | {: .center}
57 |
58 |
59 | Choose input files to automatically upload them into AskOmics
60 |
61 | {: .center}
62 |
63 | !!! Tip
64 | You will be able to add more input files later
65 |
66 | A dedicated AskOmics instance will be deployed on the cluster. Wait a few minutes and go to the instance using the `click here to display` link.
67 |
68 | {: .center}
69 |
70 | Once you are into your AskOmics instance, you can see your uploaded files into the Files tab.
71 |
72 | {: .center}
73 |
74 | ## Upload additional files
75 |
76 | In addition to the Computer and URL buttons, you can now use the Galaxy button to import datasets from your Galaxy histories
77 |
78 |
79 | {: .center}
80 |
81 | ## Integrate and Query
82 |
83 | Follow the [tutorial](/tutorial#data-integration) to integrate and query your data.
84 |
85 | ## Export Results into your Galaxy history
86 |
87 | Once you have your result, Use the `Send result to Galaxy` to export a TSV file into your last recently used Galaxy history.
88 |
89 | {: .center}
90 |
--------------------------------------------------------------------------------
/docs/production-deployment.md:
--------------------------------------------------------------------------------
1 | In production, AskOmics is deployed using docker and docker-compose. `docker-compose.yml` templates are provided to deploy your own instance.
2 |
3 | # Prerequisites
4 |
5 | Install `git`
6 |
7 | ```bash
8 | # Debian/Ubuntu
9 | apt install -y git
10 | # Fedora
11 | dnf install -y git
12 | ```
13 |
14 | Install `docker`:
15 |
16 | - [Debian](https://docs.docker.com/install/linux/docker-ce/debian/)
17 | - [Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
18 | - [Fedora](https://docs.docker.com/install/linux/docker-ce/fedora/)
19 |
20 | Install `docker-compose`:
21 |
22 | ```bash
23 | # Debian/Ubuntu
24 | apt install -y docker-compose
25 | # Fedora
26 | dnf install -y docker-compose
27 | ```
28 |
29 | # Deploy
30 |
31 | ## Download templates
32 |
33 | First, clone the [flaskomics-docker-compose](https://github.com/askomics/flaskomics-docker-compose) repository. It contains the template files needed to deploy your AskOmics instance.
34 |
35 |
36 | ```bash
37 | git clone https://github.com/askomics/flaskomics-docker-compose.git
38 | ```
39 |
40 | This repo contains several directories, depending on your needs
41 |
42 | ```bash
43 | cd flaskomics-docker-compose
44 | ls -1
45 | ```
46 |
47 | Two directories are used for production deployment
48 |
49 | - `standalone`: deploy AskOmics with all its dependencies for a standalone usage
50 | - `federated`: deploy AskOmics with all its dependencies and a federated query engine for a federated usage (Cross external endpoint such as [NeXtProt](https://sparql.nextprot.org) with local data)
51 |
52 | Choose one of these directories depending on your needs
53 |
54 | ```bash
55 | cd federated
56 | ```
57 | ## Configure
58 |
59 | First, edit the `docker-compose.yml` file. You can change the main port:
60 |
61 | - `services` > `nginx` > `ports`: You can change the default port if `80` is already used on your machine. Example: `"8080:80"` to use `8080` instead of `80`.
62 |
63 | ### Virtuoso
64 |
65 | Then, configure virtuoso by editing `virtuoso.env`
66 |
67 | Edit `VIRT_Parameters_NumberOfBuffers` and `VIRT_Parameters_MaxDirtyBuffers` following rules described [here](https://github.com/askomics/flaskomics-docker-compose#configure-virtuoso).
68 |
69 | !!! warning
70 | Change the `DBA_PASSWORD` if you plan to expose the virtuoso endpoint. The password has to be the same as in `askomics.env` > `ASKO_triplestore_password`
71 |
72 | ### Nginx (web proxy)
73 |
74 | Nginx is used to manage web redirection. Nginx configuration is in two files: `nginx.conf` and `nginx.env`. If you want to access the virtuoso endpoint, uncomment the `virtuoso` section in `nginx.conf`
75 |
76 |
77 | ### AskOmics
78 |
79 | All properties defined in `askomics.ini` can be configured via the environment variables in `askomics.env`. The environment variable should be prefixed with `ASKO_` and have a format like `ASKO_$SECTION_$KEY`. $SECTION and $KEY are case sensitive. *E.g.* property `footer_message` in the `askomics` section should be configured as `ASKO_askomics_footer_message=Welcome to my AskOmics!`.
80 |
81 | !!! warning
82 | Change `ASKO_flask_secret_key` and `ASKO_askomics_password_salt` to random string
83 |
84 | For more information about AskOmics configuration, see the [configuration](configure.md) section.
85 |
86 | #### First user
87 |
88 | Environment variables can also be used to create a user into AskOmics at first start. For this, use `CREATE_USER=true` User information can be configured with the following environment variables:
89 |
90 | - `USER_FIRST_NAME`: User first name (default: Ad)
91 | - `USER_LAST_NAME`: User last name (default: Min)
92 | - `USER_USERNAME`: Username (default: admin)
93 | - `USER_PASSWORD`: Password (default: admin)
94 | - `USER_EMAIL`: User email (default: admin@example.com)
95 | - `USER_APIKEY`: User API key (default: admin)
96 | - `GALAXY_API_KEY`: Galaxy URL linked to the user (optional)
97 | - `GALAXY_URL`: User Galaxy API Key (optional)
98 |
99 | The user will be created only if the users table of the database is empty.
100 |
--------------------------------------------------------------------------------
/docs/manage.md:
--------------------------------------------------------------------------------
1 | # Make commands
2 |
3 | Several commands are available to help manage your instance. These commands are available through `make` when launched from the same directory as the *Makefile*. (If you are running Askomics in a docker container, you will need to connect to it to launch these commands)
4 |
5 | You can run the `make help` command to get a list of available admin commands.
6 |
7 | # Updating namespaces
8 |
9 | Version 4.2 added the `/data/` route showing the properties linked to a node.
10 | To make sure that your URIs are properly redirecting to this route, you should make sure that the `namespace_data` and `namespace_internal` [configuration option](configure.md) are set to your instance *url*. Make sure to match either `http` or `https` depending on your instance, and don't forget `/data/` or `/internal/`.
11 |
12 | ## Updating an existing instance
13 | If you changed the namespaces after having already integrated some files, you will need to run two additional commands to update your existing data.
14 |
15 | - `make update-base-url`
16 | You will be prompted to enter the previous namespace url, and then the new one.
17 | You can either enter a partial namespace url (ex: `http://askomics.org/`) or the full one (ex: `http://askomics.org/data/`)
18 | In the latter case, you will need to run the command twice (once for each namespace)
19 |
20 | - `make clear-cache`
21 | This will clear the abstraction cache, making sure your data is synchronized with the new namespaces.
22 |
23 | # Single tenant mode
24 |
25 | Starting from release 4.4, the *Single tenant mode* is available through a configuration option.
26 | In Virtuoso, aggregating multiples graphs (using several FROM clauses) can be very costly for big/numerous graphs.
27 |
28 | Single tenant mode send all queries on all stored graphs, thus speeding up the queries. This means that **all graphs are public, and can be queried by any user**. This affect starting points, abstractions, and query.
29 |
30 | !!! warning
31 | If you are storing sensitive data on AskOmics, make sure to disable anonymous access and account creation when using *Single tenant mode*.
32 |
33 | !!! warning
34 | *Single tenant mode* has no effect on federated queries
35 |
36 | # Administrator panel
37 |
38 | Administrators have access to a specific panel in AskOmics.
39 | This Admin tab can be found after clicking on *Your Name ▾*.
40 |
41 | ## User management
42 |
43 | From the Admin tab, administrators are able to:
44 |
45 | - Create a new user account
46 | - Manage existing user accounts
47 | - Blocking a user account
48 | - Setting a user as an administrator
49 | - Updating a user's individual storage quota
50 | - Deleting a user
51 |
52 | They will also be able to check the last time of activity of a user.
53 |
54 | ## Files
55 |
56 | A list of all uploaded files is available. Administrators can delete a file at any time.
57 |
58 | ## Datasets
59 |
60 | All currently stored datasets are available. Administrators can publish, unpublish, and delete them.
61 |
62 | ## Forms / Templates
63 |
64 | A list of **public** forms and templates is available. Administrators can unpublish them if need be.
65 |
66 | # Anonymous query
67 |
68 | Starting from release 4.5, the *Anonymous query mode* is available through a configuration option.
69 | This option allows anonymous users to create full queries (not only previews), and access the results/sparql console associated.
70 |
71 | To avoid overloading the server, anonymous queries are regularly deleted. (Every hour for failed queries, and every X days for successful jobs, as defined by the *anonymous_query_cleanup* variable (default 60)).
72 |
73 | !!! warning
74 | Anonymous users cannot create forms/templates, but admin can from the admin panel.
75 | Do keep in mind that anonymous jobs will be deleted at some point.
76 |
77 | !!! warning
78 | If you disable the *anonymous_query*, the job cleaner will not run. You will need to delete the jobs manually from the admin panel.
79 |
--------------------------------------------------------------------------------
/askomics/react/src/routes/account/update_password.jsx:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 | import axios from 'axios'
3 | import { Col, Row, Button, Form, FormGroup, Label, Input, FormText, Alert } from 'reactstrap'
4 | import ErrorDiv from '../error/error'
5 | import PropTypes from 'prop-types'
6 |
7 | export default class UpdatePassword extends Component {
8 | constructor (props) {
9 | super(props)
10 | this.handleChange = this.handleChange.bind(this)
11 | this.handleSubmit = this.handleSubmit.bind(this)
12 | this.state = {
13 | oldPassword: '',
14 | newPassword: '',
15 | confPassword: ''
16 | }
17 | this.cancelRequest // placeholder for the axios cancel callback assigned in handleSubmit
18 | }
19 |
20 | handleChange (event) { // controlled-input handler: the input's DOM id doubles as the state key
21 | this.setState({
22 | [event.target.id]: event.target.value
23 | })
24 | }
25 |
26 | validateForm () {
27 | return (
28 | this.state.oldPassword.length > 0 &&
29 | this.state.newPassword.length > 0 &&
30 | this.state.newPassword == this.state.confPassword &&
31 | this.state.newpassword != this.state.oldPassword
32 | )
33 | }
34 |
35 | handleSubmit (event) {
36 | let requestUrl = '/api/auth/password'
37 | let data = {
38 | oldPassword: this.state.oldPassword,
39 | newPassword: this.state.newPassword,
40 | confPassword: this.state.confPassword
41 | }
42 |
43 | axios.post(requestUrl, data, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) })
44 | .then(response => {
45 | console.log(requestUrl, response.data)
46 | this.setState({
47 | isLoading: false,
48 | error: response.data.error,
49 | errorMessage: response.data.errorMessage,
50 | user: response.data.user,
51 | success: !response.data.error,
52 | status: response.data.error ? 500 : 200,
53 | oldPassword: '',
54 | newPassword: '',
55 | confPassword: ''
56 | })
57 | })
58 | .catch(error => {
59 | console.log(error, error.response.data.errorMessage)
60 | this.setState({
61 | error: true,
62 | errorMessage: error.response.data.errorMessage,
63 | status: error.response.status,
64 | success: !response.data.error,
65 | oldPassword: '',
66 | newPassword: '',
67 | confPassword: ''
68 | })
69 | })
70 | event.preventDefault()
71 | }
72 |
73 | render () {
74 | let successTick
75 | if (this.state.success) {
76 | successTick =
77 | }
78 | return (
79 |