From 695992b9d24b95d8008e3ed45e49ee3174b2c2ad Mon Sep 17 00:00:00 2001 From: mboudet Date: Wed, 29 Jun 2022 09:44:16 +0200 Subject: [PATCH] Dev onto clean (#351) Add cleaner ontology management --- askomics/api/admin.py | 10 +-- askomics/api/file.py | 2 +- askomics/api/ontology.py | 17 ++-- askomics/api/start.py | 3 +- askomics/libaskomics/Database.py | 21 +++++ askomics/libaskomics/Dataset.py | 14 ++- askomics/libaskomics/OntologyManager.py | 32 ++++--- askomics/libaskomics/SparqlQuery.py | 30 +++++-- .../react/src/components/autocomplete.jsx | 88 ++++++++++++++++--- askomics/react/src/routes.jsx | 3 +- askomics/static/css/askomics.css | 36 +++++++- config/askomics.ini.template | 4 + config/askomics.test.ini | 3 + package-lock.json | 52 +++++++++++ package.json | 2 +- test-data/agro_min.ttl | 70 +++++++++++++++ tests/conftest.py | 47 ++++++++-- tests/test_api.py | 3 +- tests/test_api_admin.py | 35 ++++---- tests/test_api_ontology.py | 73 +++++++++++++++ 20 files changed, 465 insertions(+), 80 deletions(-) create mode 100644 test-data/agro_min.ttl create mode 100644 tests/test_api_ontology.py diff --git a/askomics/api/admin.py b/askomics/api/admin.py index 631b8b1a..5200ebb5 100644 --- a/askomics/api/admin.py +++ b/askomics/api/admin.py @@ -698,12 +698,12 @@ def add_ontology(): 'errorMessage': "Missing parameter" }), 400 - name = data.get("name") - uri = data.get("uri") + name = data.get("name").strip() + uri = data.get("uri").strip() short_name = data.get("shortName") - type = data.get("type") + type = data.get("type").strip() dataset_id = data.get("datasetId") - label_uri = data.get("labelUri") + label_uri = data.get("labelUri").strip() om = OntologyManager(current_app, session) @@ -760,7 +760,7 @@ def add_ontology(): }), 400 try: - om.add_ontology(name, uri, short_name, dataset.id, dataset.graph_name, dataset.endpoint, type, label_uri) + om.add_ontology(name, uri, short_name, dataset.id, dataset.graph_name, dataset.endpoint, remote_graph=dataset.remote_graph, type=type, label_uri=label_uri) ontologies = om.list_full_ontologies() except Exception as e: traceback.print_exc(file=sys.stdout) diff --git a/askomics/api/file.py b/askomics/api/file.py index 7348323f..5d224030 100644 --- a/askomics/api/file.py +++ b/askomics/api/file.py @@ -347,7 +347,7 @@ def integrate(): endpoint = data["externalEndpoint"] or current_app.iniconfig.get('triplestore', 'endpoint') dataset = Dataset(current_app, session, dataset_info) - dataset.save_in_db(endpoint) + dataset.save_in_db(endpoint, data["externalGraph"]) data["dataset_id"] = dataset.id dataset_ids.append(dataset.id) task = current_app.celery.send_task('integrate', (session_dict, data, request.host_url)) diff --git a/askomics/api/ontology.py b/askomics/api/ontology.py index fb9ce736..fc1efe37 100644 --- a/askomics/api/ontology.py +++ b/askomics/api/ontology.py @@ -1,6 +1,7 @@ import traceback import sys -from askomics.api.auth import api_auth, login_required + +from askomics.api.auth import api_auth from askomics.libaskomics.OntologyManager import OntologyManager from flask import (Blueprint, current_app, jsonify, request, session) @@ -10,7 +11,6 @@ @onto_bp.route("/api/ontology//autocomplete", methods=["GET"]) @api_auth -@login_required def autocomplete(short_ontology): """Get the default sparql query @@ -19,8 +19,13 @@ def autocomplete(short_ontology): json """ + if "user" not in session and current_app.iniconfig.getboolean("askomics", "protect_public"): + return jsonify({ + "error": True, + "errorMessage": "Ontology {} not 
found".format(short_ontology), + "results": [] + }), 401 try: - # Disk space om = OntologyManager(current_app, session) ontology = om.get_ontology(short_name=short_ontology) if not ontology: @@ -37,7 +42,7 @@ def autocomplete(short_ontology): "results": [] }), 404 - results = om.autocomplete(ontology["uri"], ontology["type"], request.args.get("q"), short_ontology, ontology["graph"], ontology["endpoint"]) + results = om.autocomplete(ontology["uri"], ontology["type"], request.args.get("q"), short_ontology, ontology["graph"], ontology["endpoint"], ontology['label_uri'], ontology['remote_graph']) except Exception as e: traceback.print_exc(file=sys.stdout) @@ -45,10 +50,10 @@ def autocomplete(short_ontology): "error": True, "errorMessage": str(e), "results": [] - }), 404 + }), 500 return jsonify({ - "error": True, + "error": False, "errorMessage": "", "results": results }), 200 diff --git a/askomics/api/start.py b/askomics/api/start.py index 93ae4578..910af044 100644 --- a/askomics/api/start.py +++ b/askomics/api/start.py @@ -85,7 +85,8 @@ def start(): "user": {}, "logged": False, "ontologies": ontologies, - "singleTenant": current_app.iniconfig.getboolean('askomics', 'single_tenant', fallback=False) + "singleTenant": current_app.iniconfig.getboolean('askomics', 'single_tenant', fallback=False), + "autocompleteMaxResults": current_app.iniconfig.getint("askomics", "autocomplete_max_results", fallback=10) } json = { diff --git a/askomics/libaskomics/Database.py b/askomics/libaskomics/Database.py index 8a3c7e90..9fc02acb 100644 --- a/askomics/libaskomics/Database.py +++ b/askomics/libaskomics/Database.py @@ -220,6 +220,17 @@ def update_datasets_table(self): except Exception: pass + query = ''' + ALTER TABLE datasets + ADD remote_graph text NULL + DEFAULT(null) + ''' + + try: + self.execute_sql_query(query) + except Exception: + pass + def create_integration_table(self): """Create the integration table""" query = ''' @@ -455,3 +466,13 @@ def create_ontologies_table(self): self.execute_sql_query(query) except Exception: pass + + query = ''' + ALTER TABLE ontologies + ADD remote_graph text NULL + ''' + + try: + self.execute_sql_query(query) + except Exception: + pass diff --git a/askomics/libaskomics/Dataset.py b/askomics/libaskomics/Dataset.py index 87bae1ba..c3fd7b1b 100644 --- a/askomics/libaskomics/Dataset.py +++ b/askomics/libaskomics/Dataset.py @@ -46,6 +46,8 @@ def __init__(self, app, session, dataset_info={}): self.start = dataset_info["start"] if "start" in dataset_info else None self.end = dataset_info["end"] if "end" in dataset_info else None self.ontology = dataset_info["ontology"] if "ontology" in dataset_info else False + self.endpoint = dataset_info["endpoint"] if "endpoint" in dataset_info else False + self.remote_graph = dataset_info["remote_graph"] if "remote_graph" in dataset_info else False def set_info_from_db(self, admin=False): """Set the info in from the database""" @@ -59,7 +61,7 @@ def set_info_from_db(self, admin=False): where_query = "AND user_id = ?" query = ''' - SELECT celery_id, file_id, name, graph_name, public, start, end, ontology, endpoint + SELECT celery_id, file_id, name, graph_name, public, start, end, ontology, endpoint, remote_graph FROM datasets WHERE id = ? 
{} @@ -76,8 +78,9 @@ def set_info_from_db(self, admin=False): self.end = rows[0][6] self.ontology = rows[0][7] self.endpoint = rows[0][8] + self.remote_graph = rows[0][9] - def save_in_db(self, endpoint, set_graph=False): + def save_in_db(self, endpoint, remote_graph=None, set_graph=False): """Save the dataset into the database""" database = Database(self.app, self.session) @@ -89,7 +92,8 @@ def save_in_db(self, endpoint, set_graph=False): self.name, self.public, 0, - endpoint + endpoint, + remote_graph ) if set_graph: @@ -102,7 +106,8 @@ def save_in_db(self, endpoint, set_graph=False): self.graph_name, self.public, 0, - endpoint + endpoint, + remote_graph ) query = ''' @@ -122,6 +127,7 @@ def save_in_db(self, endpoint, set_graph=False): NULL, NULL, 0, + ?, ? ) '''.format(subquery) diff --git a/askomics/libaskomics/OntologyManager.py b/askomics/libaskomics/OntologyManager.py index 7a8a1e3f..c988f959 100644 --- a/askomics/libaskomics/OntologyManager.py +++ b/askomics/libaskomics/OntologyManager.py @@ -76,7 +76,7 @@ def list_full_ontologies(self): database = Database(self.app, self.session) query = ''' - SELECT ontologies.id, ontologies.name, ontologies.uri, ontologies.short_name, ontologies.type, ontologies.label_uri, datasets.id, datasets.name, ontologies.graph + SELECT ontologies.id, ontologies.name, ontologies.uri, ontologies.short_name, ontologies.type, ontologies.label_uri, datasets.id, datasets.name, ontologies.graph, ontologies.endpoint, ontologies.remote_graph FROM ontologies INNER JOIN datasets ON datasets.id=ontologies.dataset_id ''' @@ -94,7 +94,9 @@ def list_full_ontologies(self): 'label_uri': row[5], 'dataset_id': row[6], 'dataset_name': row[7], - 'graph': row[8] + 'graph': row[8], + 'endpoint': row[9], + 'remote_graph': row[10] } ontologies.append(prefix) @@ -123,7 +125,7 @@ def get_ontology(self, short_name="", uri=""): database = Database(self.app, self.session) query = ''' - SELECT id, name, uri, short_name, type, dataset_id, graph, label_uri + SELECT id, name, uri, short_name, type, dataset_id, graph, label_uri, endpoint, remote_graph FROM ontologies {} '''.format(where_clause) @@ -142,10 +144,12 @@ def get_ontology(self, short_name="", uri=""): 'type': ontology[4], 'dataset_id': ontology[5], 'graph': ontology[6], - 'label_uri': ontology[7] + 'label_uri': ontology[7], + 'endpoint': ontology[8], + 'remote_graph': ontology[9] } - def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, type="local", label_uri="rdfs:label"): + def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, remote_graph=None, type="local", label_uri="rdfs:label"): """Create a new ontology Returns @@ -167,11 +171,12 @@ def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, type= ?, ?, ?, + ?, ? 
) ''' - database.execute_sql_query(query, (name, uri, short_name, type, dataset_id, graph, label_uri, endpoint)) + database.execute_sql_query(query, (name, uri, short_name, type, dataset_id, graph, label_uri, endpoint, remote_graph)) query = ''' UPDATE datasets SET @@ -225,7 +230,7 @@ def test_ols_ontology(self, shortname): r = requests.get(base_url) return r.status_code == 200 - def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, onto_graph, onto_endpoint): + def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, onto_graph, onto_endpoint, custom_label, remote_graph): """Search in ontology Returns @@ -242,13 +247,18 @@ def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, # TODO: Actually store the graph in the ontology to quicken search query.set_graphs([onto_graph]) query.set_endpoints(set([self.settings.get('triplestore', 'endpoint'), onto_endpoint])) - return query.autocomplete_local_ontology(ontology_uri, query_term, max_results) + if remote_graph: + query.set_remote_graph({onto_endpoint: [remote_graph]}) + + return query.autocomplete_local_ontology(ontology_uri, query_term, max_results, custom_label) elif ontology_type == "ols": - base_url = "https://www.ebi.ac.uk/ols/api/suggest" + base_url = "https://www.ebi.ac.uk/ols/api/select" arguments = { "q": query_term, "ontology": quote_plus(onto_short_name.lower()), - "rows": max_results + "rows": max_results, + "type": "class", + "fieldList": "label" } r = requests.get(base_url, params=arguments) @@ -260,6 +270,6 @@ def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, res = r.json() if res['response']['docs']: - data = [term['autosuggest'] for term in res['response']['docs']] + data = [term['label'] for term in res['response']['docs']] return data diff --git a/askomics/libaskomics/SparqlQuery.py b/askomics/libaskomics/SparqlQuery.py index 69945cfb..57d2c29a 100644 --- a/askomics/libaskomics/SparqlQuery.py +++ b/askomics/libaskomics/SparqlQuery.py @@ -71,6 +71,16 @@ def set_endpoints(self, endpoints): """ self.endpoints = endpoints + def set_remote_graph(self, remote_graphs): + """Set endpoints + + Parameters + ---------- + endpoints : list + Endpoints + """ + self.remote_graphs = remote_graphs + def is_federated(self): """Return True if there is more than 1 endpoint @@ -299,11 +309,13 @@ def format_query(self, query, limit=30, replace_froms=True, federated=False, ign formatted sparql query """ froms = '' - if replace_froms and (not self.settings.getboolean("askomics", "single_tenant", fallback=False) or ignore_single_tenant): - froms = self.get_froms() if federated: - federated_line = "{}\n{}".format(self.get_federated_line(), self.get_federated_froms()) + federated_line = "" if self.settings.getboolean("askomics", "single_tenant", fallback=False) else "{}\n{}".format(self.get_federated_line(), self.get_federated_froms()) + federated_graphs_string = self.get_federated_remote_from_graphs() + else: + if replace_froms and (not self.settings.getboolean("askomics", "single_tenant", fallback=False)): + froms = self.get_froms() query_lines = query.split('\n') @@ -314,6 +326,7 @@ def format_query(self, query, limit=30, replace_froms=True, federated=False, ign if not line.upper().lstrip().startswith('FROM') and not line.upper().lstrip().startswith('LIMIT') and not line.upper().lstrip().startswith('@FEDERATE'): if line.upper().lstrip().startswith('SELECT') and federated: new_query += "\n{}\n".format(federated_line) + new_query += 
"\n{}\n".format(federated_graphs_string) new_query += '\n{}'.format(line) # Add new FROM if line.upper().lstrip().startswith('SELECT'): @@ -378,7 +391,6 @@ def get_federated_froms_from_graphs(self, graphs): from_string = "@from <{}>".format(self.local_endpoint_f) for graph in graphs: from_string += " <{}>".format(graph) - return from_string def get_federated_remote_from_graphs(self): @@ -567,7 +579,7 @@ def get_uri_parameters(self, uri, endpoints): return formated_data - def autocomplete_local_ontology(self, uri, query, max_terms): + def autocomplete_local_ontology(self, uri, query, max_terms, label): """Get results for a specific query Parameters @@ -590,17 +602,17 @@ def autocomplete_local_ontology(self, uri, query, max_terms): raw_query = ''' SELECT DISTINCT ?label WHERE {{ - ?uri rdf:type <{}> . - ?uri rdfs:label ?label . + ?uri rdf:type owl:Class . + ?uri {} ?label . {} }} - '''.format(uri, subquery) + '''.format(label, subquery) raw_query = self.prefix_query(raw_query) is_federated = self.is_federated() - sparql = self.format_query(raw_query, limit=max_terms, replace_froms=True, federated=is_federated, ignore_single_tenant=True) + sparql = self.format_query(raw_query, limit=max_terms, replace_froms=True, federated=is_federated) query_launcher = SparqlQueryLauncher(self.app, self.session, get_result_query=True, federated=is_federated) _, data = query_launcher.process_query(sparql) diff --git a/askomics/react/src/components/autocomplete.jsx b/askomics/react/src/components/autocomplete.jsx index 635881b1..452e9575 100644 --- a/askomics/react/src/components/autocomplete.jsx +++ b/askomics/react/src/components/autocomplete.jsx @@ -1,19 +1,23 @@ import React, { Component} from 'react' import axios from 'axios' import PropTypes from 'prop-types' -import TextInput from 'react-autocomplete-input'; -import 'react-autocomplete-input/dist/bundle.css'; +import { Input } from 'reactstrap' +import Autosuggest from 'react-autosuggest'; + export default class Autocomplete extends Component { constructor (props) { super(props) this.state = { ontologyShort: this.getAutoComplete(), + maxResults: this.props.config.autocompleteMaxResults, options: [] } this.handleFilterValue = this.props.handleFilterValue.bind(this) this.autocompleteOntology = this.autocompleteOntology.bind(this) + this.onSuggestionsFetchRequested = this.onSuggestionsFetchRequested.bind(this) + this.onSuggestionsClearRequested = this.onSuggestionsClearRequested.bind(this) this.cancelRequest this.handleOntoValue = this.handleOntoValue.bind(this) this.WAIT_INTERVAL = 500 @@ -38,7 +42,6 @@ export default class Autocomplete extends Component { axios.get(requestUrl, {baseURL: this.props.config.proxyPath, params:{q: userInput}, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) }) .then(response => { - // set state of resultsPreview this.setState({ options: response.data.results }) @@ -54,25 +57,84 @@ export default class Autocomplete extends Component { }) } - handleOntoValue (event) { - this.handleFilterValue(event) + + handleOntoValue (event, value) { + this.handleFilterValue({target:{value: value.newValue, id: this.props.attributeId}}) + } + + + renderSuggestion (suggestion, {query, isHighlighted}) { + let textArray = suggestion.split(RegExp(query, "gi")); + let match = suggestion.match(RegExp(query, "gi")); + + return ( + + {textArray.map((item, index) => ( + + {item} + {index !== textArray.length - 1 && match && ( + {match[index]} + )} + + ))} + + ); + } + + onSuggestionsClearRequested () { + this.setState({ + options: 
[] + }) + } + + getSuggestionValue (suggestion) { + return suggestion + }; + + onSuggestionsFetchRequested ( value ){ clearTimeout(this.timerID) this.timerID = setTimeout(() => { - this.autocompleteOntology(event.target.value) + this.autocompleteOntology(value.value) }, this.WAIT_INTERVAL) + }; + + + renderInputComponent (inputProps){ + return( +
+        <Input {...inputProps} />
+ ) } - renderAutocomplete () { - - let input = (
-      <TextInput options={this.state.options} onChange={(e) => this.handleOntoValue({target: {value: e, id: this.props.attributeId}})} id={this.props.attributeId} value={this.props.filterValue}/>
-
) - return input + shouldRenderSuggestions(value, reason){ + return value.trim().length > 2; } render () { - return this.renderAutocomplete() + + let value = this.props.filterValue + + let inputProps = { + placeholder: '', + value, + onChange: this.handleOntoValue + }; + + return ( + + ) + } } diff --git a/askomics/react/src/routes.jsx b/askomics/react/src/routes.jsx index f0b7356b..94e92c7e 100644 --- a/askomics/react/src/routes.jsx +++ b/askomics/react/src/routes.jsx @@ -48,7 +48,8 @@ export default class Routes extends Component { namespaceData: null, namespaceInternal: null, ontologies: [], - singleTenant: false + singleTenant: false, + autocompleteMaxResults: 10 } } this.cancelRequest diff --git a/askomics/static/css/askomics.css b/askomics/static/css/askomics.css index c6525b2a..2d6a4065 100644 --- a/askomics/static/css/askomics.css +++ b/askomics/static/css/askomics.css @@ -276,9 +276,37 @@ button.input-with-icon { display: block; } -.react-autocomplete-input { - left: auto !important; - top: auto !important; +/***********************************************************************/ + + +.react-autosuggest__suggestions-container--open { + background-clip: padding-box; + background-color: #fff; + border: 1px solid rgba(0,0,0,0.15); + bottom: auto; + box-shadow: 0 6px 12px rgba(0,0,0,0.175); + display: block; + font-size: 14px; + list-style: none; + padding: 1px; + position: absolute; + text-align: left; + z-index: 20000; } -/***********************************************************************/ +.react-autosuggest__suggestions-list { + margin: 0; + padding: 0; + list-style-type: none; +} + +.react-autosuggest__suggestion { + cursor: pointer; + padding: 10px; + min-width: 100px; +} + +.react-autosuggest__suggestion--highlighted { + background-color: #0356fc; + color: #fff; +} diff --git a/config/askomics.ini.template b/config/askomics.ini.template index af284f2b..d861b8fe 100644 --- a/config/askomics.ini.template +++ b/config/askomics.ini.template @@ -83,6 +83,10 @@ ldap_mail_attribute = mail #ldap_password_reset_link = #ldap_account_link = +# Max results returned for autocompletion +autocomplete_max_results = 10 + + [triplestore] # name of the triplestore, can be virtuoso or fuseki triplestore = virtuoso diff --git a/config/askomics.test.ini b/config/askomics.test.ini index f2afac84..6d9bd17c 100644 --- a/config/askomics.test.ini +++ b/config/askomics.test.ini @@ -76,6 +76,7 @@ ldap_surname_attribute = sn ldap_mail_attribute = mail #ldap_password_reset_link = #ldap_account_link = +autocomplete_max_results = 20 [triplestore] # name of the triplestore, can be virtuoso or fuseki @@ -132,3 +133,5 @@ local_endpoint=http://askomics-host:8891/sparql # Sentry dsn to report python and js errors in a sentry instance # server_dsn = https://00000000000000000000000000000000@exemple.org/1 # frontend_dsn = https://00000000000000000000000000000000@exemple.org/2 + +# Max results returned for autocompletion diff --git a/package-lock.json b/package-lock.json index 0b75625b..04f514fa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4547,6 +4547,11 @@ "is-symbol": "^1.0.2" } }, + "es6-promise": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" + }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -5513,6 +5518,11 @@ "integrity": 
"sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true }, + "get-input-selection": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/get-input-selection/-/get-input-selection-1.1.4.tgz", + "integrity": "sha512-o3rv95OOpoHznujIEwZljNhUM9efW/gZsIKCQtTrjRU4PkneVpDvxNBmC7kXC4519lZYT95DKcdj0A5f9GZkKg==" + }, "get-intrinsic": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", @@ -8625,6 +8635,18 @@ "object-assign": "^4.1.0" } }, + "react-autosuggest": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/react-autosuggest/-/react-autosuggest-10.1.0.tgz", + "integrity": "sha512-/azBHmc6z/31s/lBf6irxPf/7eejQdR0IqnZUzjdSibtlS8+Rw/R79pgDAo6Ft5QqCUTyEQ+f0FhL+1olDQ8OA==", + "requires": { + "es6-promise": "^4.2.8", + "prop-types": "^15.7.2", + "react-themeable": "^1.1.0", + "section-iterator": "^2.0.0", + "shallow-equal": "^1.2.1" + } + }, "react-bootstrap-table-next": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/react-bootstrap-table-next/-/react-bootstrap-table-next-4.0.3.tgz", @@ -8817,6 +8839,21 @@ "refractor": "^3.2.0" } }, + "react-themeable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/react-themeable/-/react-themeable-1.1.0.tgz", + "integrity": "sha512-kl5tQ8K+r9IdQXZd8WLa+xxYN04lLnJXRVhHfdgwsUJr/SlKJxIejoc9z9obEkx1mdqbTw1ry43fxEUwyD9u7w==", + "requires": { + "object-assign": "^3.0.0" + }, + "dependencies": { + "object-assign": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-3.0.0.tgz", + "integrity": "sha512-jHP15vXVGeVh1HuaA2wY6lxk+whK/x4KBG88VXeRma7CCun7iGD5qPc4eYykQ9sdQvg8jkwFKsSxHln2ybW3xQ==" + } + } + }, "react-tooltip": { "version": "4.2.21", "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-4.2.21.tgz", @@ -9352,6 +9389,11 @@ "ajv-keywords": "^3.5.2" } }, + "section-iterator": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/section-iterator/-/section-iterator-2.0.0.tgz", + "integrity": "sha512-xvTNwcbeDayXotnV32zLb3duQsP+4XosHpb/F+tu6VzEZFmIjzPdNk6/O+QOOx5XTh08KL2ufdXeCO33p380pQ==" + }, "select": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/select/-/select-1.1.2.tgz", @@ -9440,6 +9482,11 @@ "safe-buffer": "^5.0.1" } }, + "shallow-equal": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shallow-equal/-/shallow-equal-1.2.1.tgz", + "integrity": "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" + }, "shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -10118,6 +10165,11 @@ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, + "textarea-caret": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/textarea-caret/-/textarea-caret-3.1.0.tgz", + "integrity": "sha512-cXAvzO9pP5CGa6NKx0WYHl+8CHKZs8byMkt3PCJBCmq2a34YA9pO1NrQET5pzeqnBjBdToF5No4rrmkDUgQC2Q==" + }, "three": { "version": "0.126.1", "resolved": "https://registry.npmjs.org/three/-/three-0.126.1.tgz", diff --git a/package.json b/package.json index 9f088555..fb733689 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,7 @@ "qs": "^6.9.4", "react": "^16.13.1", "react-ace": "^9.1.3", - "react-autocomplete-input": "1.0.18", + "react-autosuggest": "^10.1.0", "react-addons-update": "^15.6.3", "react-bootstrap-table-next": "^4.0.3", "react-bootstrap-table2-editor": "^1.4.0", diff --git 
a/test-data/agro_min.ttl b/test-data/agro_min.ttl new file mode 100644 index 00000000..4f895879 --- /dev/null +++ b/test-data/agro_min.ttl @@ -0,0 +1,70 @@ +@prefix askomics: . +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix xml: . +@prefix xsd: . +@prefix ns1: . + + + a askomics:ontology ; + a owl:ontology ; + rdfs:label "AGRO". + +[] a owl:ObjectProperty ; + a askomics:AskomicsRelation ; + askomics:uri rdfs:subClassOf ; + rdfs:label "subClassOf" ; + rdfs:domain ; + rdfs:range . + + + a owl:Class ; + rdfs:label "desuckering" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "irrigation water source role" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "irrigation water quantity" ; + rdfs:subClassOf , + . + + a owl:Class ; + rdfs:label "reduced tillage process" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "laser land levelling process" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "chemical pest control process" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "no-till" ; + rdfs:subClassOf , + . + + a owl:Class ; + rdfs:label "puddling process" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "mulch-till" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "ridge-till" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "strip-till" ; + rdfs:subClassOf . + + a owl:Class ; + rdfs:label "aerial application" ; + rdfs:subClassOf . diff --git a/tests/conftest.py b/tests/conftest.py index 84d08eed..999d0248 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -251,7 +251,8 @@ def upload_file(self, file_path): ".tsv": "text/tab-separated-values", ".csv": "text/tab-separated-values", ".gff3": "null", - ".bed": "null" + ".bed": "null", + ".ttl": "rdf/ttl" } with open(file_path, 'r') as file_content: @@ -293,7 +294,7 @@ def upload_file_url(self, file_url): files.download_url(file_url, "1") return files.date - def integrate_file(self, info, public=False, set_graph=False): + def integrate_file(self, info, public=False, set_graph=False, endpoint=""): """Summary Parameters @@ -309,6 +310,9 @@ def integrate_file(self, info, public=False, set_graph=False): files_handler = FilesHandler(self.app, self.session) files_handler.handle_files([info["id"], ]) + # TODO: Fix this. Why do we need the virtuoso url? 
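+        # Assumption behind this fallback: in the docker-compose test stack
+        # the triplestore container is reachable from the app as
+        # http://virtuoso:8890/sparql, so the tests default to it when no
+        # explicit endpoint is given.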
+ endpoint = endpoint or "http://virtuoso:8890/sparql" + for file in files_handler.files: dataset_info = { @@ -320,7 +324,7 @@ def integrate_file(self, info, public=False, set_graph=False): } dataset = Dataset(self.app, self.session, dataset_info) - dataset.save_in_db("http://virtuoso:8890/sparql", set_graph=set_graph) + dataset.save_in_db(endpoint, set_graph=set_graph) if file.type == "csv/tsv": file.integrate(dataset.id, info["columns_type"], public=public) @@ -328,7 +332,8 @@ def integrate_file(self, info, public=False, set_graph=False): file.integrate(dataset.id, info["entities"], public=public) elif file.type == "bed": file.integrate(dataset.id, info["entity_name"], public=public) - + elif file.type in ('rdf/ttl', 'rdf/xml', 'rdf/nt'): + file.integrate(public=public) # done dataset.update_in_db("success") dataset.set_info_from_db() @@ -336,7 +341,9 @@ def integrate_file(self, info, public=False, set_graph=False): return { "timestamp": file.timestamp, "start": dataset.start, - "end": dataset.end + "end": dataset.end, + "graph": dataset.graph_name, + "endpoint": dataset.endpoint } def upload(self): @@ -445,6 +452,31 @@ def upload_and_integrate(self, set_graph=False): } } + def upload_and_integrate_ontology(self): + """Summary + + Returns + ------- + TYPE + Description + """ + # upload + up_ontology = self.upload_file("test-data/agro_min.ttl") + + # integrate + int_ontology = self.integrate_file({ + "id": 1, + }, set_graph=True, endpoint="http://localhost:8891/sparql-auth") + + return { + "upload": up_ontology, + "timestamp": int_ontology["timestamp"], + "start": int_ontology["start"], + "end": int_ontology["end"], + "graph": int_ontology["graph"], + "endpoint": int_ontology["endpoint"] + } + def create_result(self, has_form=False): """Create a result entry in db @@ -590,9 +622,10 @@ def create_prefix(self): def create_ontology(self): """Create ontology""" - self.upload_and_integrate() + data = self.upload_and_integrate_ontology() om = OntologyManager(self.app, self.session) - om.add_ontology("Open Biological and Biomedical Ontology", "http://purl.obolibrary.org/obo/agro.owl", "OBO", 1, "mygraph", "local") + om.add_ontology("AgrO ontology", "http://purl.obolibrary.org/obo/agro.owl", "AGRO", 1, data["graph"], data['endpoint'], type="local") + return data["graph"], data["endpoint"] @staticmethod def get_random_string(number): diff --git a/tests/test_api.py b/tests/test_api.py index ea66cc70..81a8bb23 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -44,7 +44,8 @@ def test_start(self, client): "user": {}, "logged": False, "ontologies": [], - "singleTenant": False + "singleTenant": False, + "autocompleteMaxResults": 20 } response = client.client.get('/api/start') assert response.status_code == 200 diff --git a/tests/test_api_admin.py b/tests/test_api_admin.py index d3b74f6d..30b6cbba 100644 --- a/tests/test_api_admin.py +++ b/tests/test_api_admin.py @@ -605,7 +605,7 @@ def test_view_ontologies(self, client): assert response.status_code == 200 assert response.json == expected_empty - client.create_ontology() + graph, endpoint = client.create_ontology() response = client.client.get('/api/admin/getontologies') @@ -614,13 +614,15 @@ def test_view_ontologies(self, client): "errorMessage": "", "ontologies": [{ "id": 1, - "name": "Open Biological and Biomedical Ontology", + "name": "AgrO ontology", "uri": "http://purl.obolibrary.org/obo/agro.owl", - "short_name": "OBO", + "short_name": "AGRO", "type": "local", "dataset_id": 1, - "dataset_name": "transcripts.tsv", - "graph": "mygraph", + 
"dataset_name": "agro_min.ttl", + "graph": graph, + "endpoint": endpoint, + "remote_graph": None, "label_uri": "rdfs:label" }] } @@ -633,13 +635,15 @@ def test_add_ontology(self, client): client.create_two_users() client.log_user("jsmith") - data = {"shortName": "OBO", "uri": "http://purl.obolibrary.org/obo/agro.owl", "name": "Open Biological and Biomedical Ontology", "type": "local", "datasetId": 1, "labelUri": "rdfs:label"} + data = {"shortName": "AGRO", "uri": "http://purl.obolibrary.org/obo/agro.owl", "name": "AgrO ontology", "type": "local", "datasetId": 1, "labelUri": "rdfs:label"} response = client.client.post('/api/admin/addontology', json=data) assert response.status_code == 401 client.log_user("jdoe") - client.upload_and_integrate(set_graph=True) + graph_data = client.upload_and_integrate_ontology() + graph = graph_data["graph"] + endpoint = graph_data["endpoint"] response = client.client.post('/api/admin/addontology', json=data) @@ -655,22 +659,21 @@ def test_add_ontology(self, client): "errorMessage": "", "ontologies": [{ "id": 1, - "name": "Open Biological and Biomedical Ontology", + "name": "AgrO ontology", "uri": "http://purl.obolibrary.org/obo/agro.owl", - "short_name": "OBO", + "short_name": "AGRO", "type": "local", "dataset_id": 1, - "dataset_name": "transcripts.tsv", - "label_uri": "rdfs:label" + "dataset_name": "agro_min.ttl", + "label_uri": "rdfs:label", + "graph": graph, + "endpoint": endpoint, + "remote_graph": None }] } - # Graph name is random - res = response.json - res['ontologies'][0].pop('graph') - assert response.status_code == 200 - assert res == expected + assert response.json == expected def test_delete_ontologies(self, client): """test /api/admin/delete_ontologies route""" diff --git a/tests/test_api_ontology.py b/tests/test_api_ontology.py new file mode 100644 index 00000000..c22a1821 --- /dev/null +++ b/tests/test_api_ontology.py @@ -0,0 +1,73 @@ +from . 
import AskomicsTestCase
+
+
+class TestApiOntology(AskomicsTestCase):
+    """Test AskOmics API /api/ontology/<short_ontology>/autocomplete"""
+
+    def test_local_autocompletion_protected(self, client):
+        """Test autocompletion on a protected instance"""
+        query = "blabla"
+        client.set_config("askomics", "protect_public", "true")
+        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))
+
+        assert response.status_code == 401
+        assert len(response.json["results"]) == 0
+
+    def test_local_autocompletion_missing_ontology(self, client):
+        """Test autocompletion on a missing ontology"""
+        query = "blabla"
+        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))
+
+        assert response.status_code == 404
+        assert len(response.json["results"]) == 0
+
+    def test_local_autocompletion(self, client):
+        """Test the /api/ontology/AGRO/autocomplete route"""
+        client.create_two_users()
+        client.log_user("jdoe")
+
+        client.create_ontology()
+
+        query = "blabla"
+        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))
+
+        assert response.status_code == 200
+        assert len(response.json["results"]) == 0
+        assert response.json["results"] == []
+
+        query = ""
+        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))
+
+        expected = [
+            "desuckering",
+            "irrigation water source role",
+            "irrigation water quantity",
+            "reduced tillage process",
+            "laser land levelling process",
+            "chemical pest control process",
+            "no-till",
+            "puddling process",
+            "mulch-till",
+            "ridge-till",
+            "strip-till",
+            "aerial application"
+        ]
+
+        assert response.status_code == 200
+        assert len(response.json["results"]) == 12
+
+        # SPARQL result order is not deterministic, so compare the full
+        # result sets without relying on ordering
+        assert self.equal_objects(response.json["results"], expected)
+
+        query = "irrigation"
+        response = client.client.get('/api/ontology/AGRO/autocomplete?q={}'.format(query))
+
+        expected = [
+            "irrigation water source role",
+            "irrigation water quantity"
+        ]
+
+        assert response.status_code == 200
+        assert len(response.json["results"]) == 2
+        assert self.equal_objects(response.json["results"], expected)
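
Note on usage: a minimal client-side sketch of the new autocomplete route,
handy as a smoke test. The route, the "q" parameter and the
{error, errorMessage, results} payload come from the askomics/api/ontology.py
hunk above; the base URL and the autocomplete() helper are illustrative
assumptions, not part of the patch.

    # smoke_autocomplete.py
    import requests

    BASE_URL = "http://localhost:5000"  # assumption: local dev instance


    def autocomplete(short_name, term):
        """Return label suggestions for a search term."""
        r = requests.get(
            "{}/api/ontology/{}/autocomplete".format(BASE_URL, short_name),
            params={"q": term},
        )
        payload = r.json()
        # 401: protect_public instance without a session; 404: unknown
        # ontology; 500: query failure. Every branch carries a "results" list.
        if r.status_code != 200 or payload["error"]:
            raise RuntimeError(payload["errorMessage"])
        return payload["results"]


    if __name__ == "__main__":
        # With agro_min.ttl integrated as the AGRO ontology (see conftest.py),
        # "irrigation" should yield two labels.
        print(autocomplete("AGRO", "irrigation"))

The number of suggestions is capped by the new autocomplete_max_results
setting (10 by default, 20 in the test config).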
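
For "ols" ontologies, OntologyManager.autocomplete now queries the OLS
select endpoint instead of suggest and reads the "label" field. A
self-contained sketch of that branch, mirroring the hunk above (including
its quote_plus call, although requests would URL-encode the parameter by
itself):

    import requests
    from urllib.parse import quote_plus


    def ols_autocomplete(short_name, term, rows=10):
        """Fetch class label suggestions from the EBI OLS API."""
        r = requests.get(
            "https://www.ebi.ac.uk/ols/api/select",
            params={
                "q": term,
                "ontology": quote_plus(short_name.lower()),
                "type": "class",
                "fieldList": "label",
                "rows": rows,
            },
        )
        # Like the patched method, return no suggestions on any failure
        if not r.status_code == 200:
            return []
        docs = r.json()["response"]["docs"]
        return [doc["label"] for doc in docs]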