Skip to content

Commit

Permalink
Merge 6c05bfd into e440bb1
Browse files Browse the repository at this point in the history
  • Loading branch information
mboudet committed Jun 28, 2022
2 parents e440bb1 + 6c05bfd commit a7faa8c
Show file tree
Hide file tree
Showing 20 changed files with 465 additions and 80 deletions.
10 changes: 5 additions & 5 deletions askomics/api/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -698,12 +698,12 @@ def add_ontology():
'errorMessage': "Missing parameter"
}), 400

name = data.get("name")
uri = data.get("uri")
name = data.get("name").strip()
uri = data.get("uri").strip()
short_name = data.get("shortName")
type = data.get("type")
type = data.get("type").strip()
dataset_id = data.get("datasetId")
label_uri = data.get("labelUri")
label_uri = data.get("labelUri").strip()

om = OntologyManager(current_app, session)

Expand Down Expand Up @@ -760,7 +760,7 @@ def add_ontology():
}), 400

try:
om.add_ontology(name, uri, short_name, dataset.id, dataset.graph_name, dataset.endpoint, type, label_uri)
om.add_ontology(name, uri, short_name, dataset.id, dataset.graph_name, dataset.endpoint, remote_graph=dataset.remote_graph, type=type, label_uri=label_uri)
ontologies = om.list_full_ontologies()
except Exception as e:
traceback.print_exc(file=sys.stdout)
Expand Down
2 changes: 1 addition & 1 deletion askomics/api/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ def integrate():
endpoint = data["externalEndpoint"] or current_app.iniconfig.get('triplestore', 'endpoint')

dataset = Dataset(current_app, session, dataset_info)
dataset.save_in_db(endpoint)
dataset.save_in_db(endpoint, data["externalGraph"])
data["dataset_id"] = dataset.id
dataset_ids.append(dataset.id)
task = current_app.celery.send_task('integrate', (session_dict, data, request.host_url))
Expand Down
17 changes: 11 additions & 6 deletions askomics/api/ontology.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import traceback
import sys
from askomics.api.auth import api_auth, login_required

from askomics.api.auth import api_auth
from askomics.libaskomics.OntologyManager import OntologyManager

from flask import (Blueprint, current_app, jsonify, request, session)
Expand All @@ -10,7 +11,6 @@

@onto_bp.route("/api/ontology/<short_ontology>/autocomplete", methods=["GET"])
@api_auth
@login_required
def autocomplete(short_ontology):
"""Get the default sparql query
Expand All @@ -19,8 +19,13 @@ def autocomplete(short_ontology):
json
"""

if "user" not in session and current_app.iniconfig.getboolean("askomics", "protect_public"):
return jsonify({
"error": True,
"errorMessage": "Ontology {} not found".format(short_ontology),
"results": []
}), 401
try:
# Disk space
om = OntologyManager(current_app, session)
ontology = om.get_ontology(short_name=short_ontology)
if not ontology:
Expand All @@ -37,18 +42,18 @@ def autocomplete(short_ontology):
"results": []
}), 404

results = om.autocomplete(ontology["uri"], ontology["type"], request.args.get("q"), short_ontology, ontology["graph"], ontology["endpoint"])
results = om.autocomplete(ontology["uri"], ontology["type"], request.args.get("q"), short_ontology, ontology["graph"], ontology["endpoint"], ontology['label_uri'], ontology['remote_graph'])

except Exception as e:
traceback.print_exc(file=sys.stdout)
return jsonify({
"error": True,
"errorMessage": str(e),
"results": []
}), 404
}), 500

return jsonify({
"error": True,
"error": False,
"errorMessage": "",
"results": results
}), 200
3 changes: 2 additions & 1 deletion askomics/api/start.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,8 @@ def start():
"user": {},
"logged": False,
"ontologies": ontologies,
"singleTenant": current_app.iniconfig.getboolean('askomics', 'single_tenant', fallback=False)
"singleTenant": current_app.iniconfig.getboolean('askomics', 'single_tenant', fallback=False),
"autocompleteMaxResults": current_app.iniconfig.getint("askomics", "autocomplete_max_results", fallback=10)
}

json = {
Expand Down
21 changes: 21 additions & 0 deletions askomics/libaskomics/Database.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,17 @@ def update_datasets_table(self):
except Exception:
pass

query = '''
ALTER TABLE datasets
ADD remote_graph text NULL
DEFAULT(null)
'''

try:
self.execute_sql_query(query)
except Exception:
pass

def create_integration_table(self):
"""Create the integration table"""
query = '''
Expand Down Expand Up @@ -455,3 +466,13 @@ def create_ontologies_table(self):
self.execute_sql_query(query)
except Exception:
pass

query = '''
ALTER TABLE ontologies
ADD remote_graph text NULL
'''

try:
self.execute_sql_query(query)
except Exception:
pass
14 changes: 10 additions & 4 deletions askomics/libaskomics/Dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ def __init__(self, app, session, dataset_info={}):
self.start = dataset_info["start"] if "start" in dataset_info else None
self.end = dataset_info["end"] if "end" in dataset_info else None
self.ontology = dataset_info["ontology"] if "ontology" in dataset_info else False
self.endpoint = dataset_info["endpoint"] if "endpoint" in dataset_info else False
self.remote_graph = dataset_info["remote_graph"] if "remote_graph" in dataset_info else False

def set_info_from_db(self, admin=False):
"""Set the info in from the database"""
Expand All @@ -59,7 +61,7 @@ def set_info_from_db(self, admin=False):
where_query = "AND user_id = ?"

query = '''
SELECT celery_id, file_id, name, graph_name, public, start, end, ontology, endpoint
SELECT celery_id, file_id, name, graph_name, public, start, end, ontology, endpoint, remote_graph
FROM datasets
WHERE id = ?
{}
Expand All @@ -76,8 +78,9 @@ def set_info_from_db(self, admin=False):
self.end = rows[0][6]
self.ontology = rows[0][7]
self.endpoint = rows[0][8]
self.remote_graph = rows[0][9]

def save_in_db(self, endpoint, set_graph=False):
def save_in_db(self, endpoint, remote_graph=None, set_graph=False):
"""Save the dataset into the database"""
database = Database(self.app, self.session)

Expand All @@ -89,7 +92,8 @@ def save_in_db(self, endpoint, set_graph=False):
self.name,
self.public,
0,
endpoint
endpoint,
remote_graph
)

if set_graph:
Expand All @@ -102,7 +106,8 @@ def save_in_db(self, endpoint, set_graph=False):
self.graph_name,
self.public,
0,
endpoint
endpoint,
remote_graph
)

query = '''
Expand All @@ -122,6 +127,7 @@ def save_in_db(self, endpoint, set_graph=False):
NULL,
NULL,
0,
?,
?
)
'''.format(subquery)
Expand Down
32 changes: 21 additions & 11 deletions askomics/libaskomics/OntologyManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def list_full_ontologies(self):
database = Database(self.app, self.session)

query = '''
SELECT ontologies.id, ontologies.name, ontologies.uri, ontologies.short_name, ontologies.type, ontologies.label_uri, datasets.id, datasets.name, ontologies.graph
SELECT ontologies.id, ontologies.name, ontologies.uri, ontologies.short_name, ontologies.type, ontologies.label_uri, datasets.id, datasets.name, ontologies.graph, ontologies.endpoint, ontologies.remote_graph
FROM ontologies
INNER JOIN datasets ON datasets.id=ontologies.dataset_id
'''
Expand All @@ -94,7 +94,9 @@ def list_full_ontologies(self):
'label_uri': row[5],
'dataset_id': row[6],
'dataset_name': row[7],
'graph': row[8]
'graph': row[8],
'endpoint': row[9],
'remote_graph': row[10]
}
ontologies.append(prefix)

Expand Down Expand Up @@ -123,7 +125,7 @@ def get_ontology(self, short_name="", uri=""):
database = Database(self.app, self.session)

query = '''
SELECT id, name, uri, short_name, type, dataset_id, graph, label_uri
SELECT id, name, uri, short_name, type, dataset_id, graph, label_uri, endpoint, remote_graph
FROM ontologies
{}
'''.format(where_clause)
Expand All @@ -142,10 +144,12 @@ def get_ontology(self, short_name="", uri=""):
'type': ontology[4],
'dataset_id': ontology[5],
'graph': ontology[6],
'label_uri': ontology[7]
'label_uri': ontology[7],
'endpoint': ontology[8],
'remote_graph': ontology[9]
}

def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, type="local", label_uri="rdfs:label"):
def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, remote_graph=None, type="local", label_uri="rdfs:label"):
"""Create a new ontology
Returns
Expand All @@ -167,11 +171,12 @@ def add_ontology(self, name, uri, short_name, dataset_id, graph, endpoint, type=
?,
?,
?,
?,
?
)
'''

database.execute_sql_query(query, (name, uri, short_name, type, dataset_id, graph, label_uri, endpoint))
database.execute_sql_query(query, (name, uri, short_name, type, dataset_id, graph, label_uri, endpoint, remote_graph))

query = '''
UPDATE datasets SET
Expand Down Expand Up @@ -225,7 +230,7 @@ def test_ols_ontology(self, shortname):
r = requests.get(base_url)
return r.status_code == 200

def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, onto_graph, onto_endpoint):
def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name, onto_graph, onto_endpoint, custom_label, remote_graph):
"""Search in ontology
Returns
Expand All @@ -242,13 +247,18 @@ def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name,
# TODO: Actually store the graph in the ontology to quicken search
query.set_graphs([onto_graph])
query.set_endpoints(set([self.settings.get('triplestore', 'endpoint'), onto_endpoint]))
return query.autocomplete_local_ontology(ontology_uri, query_term, max_results)
if remote_graph:
query.set_remote_graph({onto_endpoint: [remote_graph]})

return query.autocomplete_local_ontology(ontology_uri, query_term, max_results, custom_label)
elif ontology_type == "ols":
base_url = "https://www.ebi.ac.uk/ols/api/suggest"
base_url = "https://www.ebi.ac.uk/ols/api/select"
arguments = {
"q": query_term,
"ontology": quote_plus(onto_short_name.lower()),
"rows": max_results
"rows": max_results,
"type": "class",
"fieldList": "label"
}

r = requests.get(base_url, params=arguments)
Expand All @@ -260,6 +270,6 @@ def autocomplete(self, ontology_uri, ontology_type, query_term, onto_short_name,

res = r.json()
if res['response']['docs']:
data = [term['autosuggest'] for term in res['response']['docs']]
data = [term['label'] for term in res['response']['docs']]

return data
30 changes: 21 additions & 9 deletions askomics/libaskomics/SparqlQuery.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,16 @@ def set_endpoints(self, endpoints):
"""
self.endpoints = endpoints

def set_remote_graph(self, remote_graphs):
    """Set the remote graphs to restrict federated queries to.

    Parameters
    ----------
    remote_graphs : dict
        Mapping of remote endpoint URL to the list of graph URIs to
        query on that endpoint (e.g. ``{endpoint: [graph_uri]}``)
    """
    self.remote_graphs = remote_graphs

def is_federated(self):
"""Return True if there is more than 1 endpoint
Expand Down Expand Up @@ -299,11 +309,13 @@ def format_query(self, query, limit=30, replace_froms=True, federated=False, ign
formatted sparql query
"""
froms = ''
if replace_froms and (not self.settings.getboolean("askomics", "single_tenant", fallback=False) or ignore_single_tenant):
froms = self.get_froms()

if federated:
federated_line = "{}\n{}".format(self.get_federated_line(), self.get_federated_froms())
federated_line = "" if self.settings.getboolean("askomics", "single_tenant", fallback=False) else "{}\n{}".format(self.get_federated_line(), self.get_federated_froms())
federated_graphs_string = self.get_federated_remote_from_graphs()
else:
if replace_froms and (not self.settings.getboolean("askomics", "single_tenant", fallback=False)):
froms = self.get_froms()

query_lines = query.split('\n')

Expand All @@ -314,6 +326,7 @@ def format_query(self, query, limit=30, replace_froms=True, federated=False, ign
if not line.upper().lstrip().startswith('FROM') and not line.upper().lstrip().startswith('LIMIT') and not line.upper().lstrip().startswith('@FEDERATE'):
if line.upper().lstrip().startswith('SELECT') and federated:
new_query += "\n{}\n".format(federated_line)
new_query += "\n{}\n".format(federated_graphs_string)
new_query += '\n{}'.format(line)
# Add new FROM
if line.upper().lstrip().startswith('SELECT'):
Expand Down Expand Up @@ -378,7 +391,6 @@ def get_federated_froms_from_graphs(self, graphs):
from_string = "@from <{}>".format(self.local_endpoint_f)
for graph in graphs:
from_string += " <{}>".format(graph)

return from_string

def get_federated_remote_from_graphs(self):
Expand Down Expand Up @@ -567,7 +579,7 @@ def get_uri_parameters(self, uri, endpoints):

return formated_data

def autocomplete_local_ontology(self, uri, query, max_terms):
def autocomplete_local_ontology(self, uri, query, max_terms, label):
"""Get results for a specific query
Parameters
Expand All @@ -590,17 +602,17 @@ def autocomplete_local_ontology(self, uri, query, max_terms):
raw_query = '''
SELECT DISTINCT ?label
WHERE {{
?uri rdf:type <{}> .
?uri rdfs:label ?label .
?uri rdf:type owl:Class .
?uri {} ?label .
{}
}}
'''.format(uri, subquery)
'''.format(label, subquery)

raw_query = self.prefix_query(raw_query)

is_federated = self.is_federated()

sparql = self.format_query(raw_query, limit=max_terms, replace_froms=True, federated=is_federated, ignore_single_tenant=True)
sparql = self.format_query(raw_query, limit=max_terms, replace_froms=True, federated=is_federated)

query_launcher = SparqlQueryLauncher(self.app, self.session, get_result_query=True, federated=is_federated)
_, data = query_launcher.process_query(sparql)
Expand Down

0 comments on commit a7faa8c

Please sign in to comment.