From 4ed7ebff477dbfa9bfa573672f6b96f9874d836a Mon Sep 17 00:00:00 2001
From: Olivier Filangi
Date: Fri, 20 Oct 2017 13:35:07 +0200
Subject: [PATCH 001/136] - remove the allowed_upload property. This key was
 meant to block uploads from AskOmics when AskOmics was driven by another
 program; that use case has to be managed in another way.
 - add Blazegraph triplestore management

---
 askomics/ask_view.py                          |   2 -
 .../templates/handlebars/uploaded_files.hbs   |   4 +-
 configs/development.blazegraph.ini            | 100 ++++++++++++++++++
 configs/development.mulgara.ini               | 100 ++++++++++++++++++
 configs/production.blazegraph.ini             | 100 ++++++++++++++++++
 5 files changed, 301 insertions(+), 5 deletions(-)
 create mode 100644 configs/development.blazegraph.ini
 create mode 100644 configs/development.mulgara.ini
 create mode 100644 configs/production.blazegraph.ini

diff --git a/askomics/ask_view.py b/askomics/ask_view.py
index 7294e294..c8779980 100644
--- a/askomics/ask_view.py
+++ b/askomics/ask_view.py
@@ -1787,7 +1787,6 @@ def get_uploaded_files(self):
         param_manager = ParamManager(self.settings, self.request.session)
         path = param_manager.get_upload_directory()
-        allowed_upload = param_manager.get_param('askomics.allowed_upload')

         self.data = {}
         self.data['files'] = {}
@@ -1800,7 +1799,6 @@ def get_uploaded_files(self):
             self.data['files'][file] = file_size

         self.data['galaxy'] = self.request.session['galaxy']
-        self.data['allowed_upload'] = allowed_upload

         return self.data

diff --git a/askomics/static/src/templates/handlebars/uploaded_files.hbs b/askomics/static/src/templates/handlebars/uploaded_files.hbs
index 6bb01da7..fb4de29b 100644
--- a/askomics/static/src/templates/handlebars/uploaded_files.hbs
+++ b/askomics/static/src/templates/handlebars/uploaded_files.hbs
@@ -1,14 +1,12 @@

Uploaded files


-{{#if allowed_upload}}
{{#if galaxy}} {{/if}}
-{{/if}}

@@ -33,4 +31,4 @@ - \ No newline at end of file + diff --git a/configs/development.blazegraph.ini b/configs/development.blazegraph.ini new file mode 100644 index 00000000..e0062144 --- /dev/null +++ b/configs/development.blazegraph.ini @@ -0,0 +1,100 @@ +### +# app configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html +### + +[app:main] +use = egg:Askomics + +# Pyramid debug mode +pyramid.reload_templates = true +pyramid.debug_authorization = false +pyramid.debug_notfound = false +pyramid.debug_routematch = false +pyramid.default_locale_name = en +pyramid.includes = pyramid_debugtoolbar +askomics.debug = true +# debugtoolbar.hosts = 127.0.0.1 ::1 +# docker run --name blazegraph -p 8889:8080 lyrasis/blazegraph:2.1.4 +# Triplestore configuration +askomics.endpoint = http://localhost:8889/bigdata/sparql +askomics.hack_virtuoso = false +askomics.upload_user_data_method=insert +askomics.max_content_size_to_update_database = 4000 +askomics.graph = urn:sparql:askomics +askomics.users_graph = urn:sparql:askomics:users +askomics.prefix = http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology# +askomics.display_setting = http://www.irisa.fr/dyliss/rdfVisualization/display +askomics.delete_method = DELETE +#can be POST or DELETE + +# AskOmics configuration +askomics.overview_lines_limit = 200 +askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values','text/fasta' +askomics.upload_min_size = 1 +askomics.upload_max_size = 200000000 +askomics.files_dir = /tmp/askomics + +# Authentication +askomics.salt = AskOmics +askomics.secret = seppfmhcag + +# Proxy setting +# Set askomics.proxy to: +# - auto to use the system proxy (default) +# - custom to use specified http, https, username and password information +# - noproxy to use AskOmics with no proxies defined +askomics.proxy = auto +# These parameters are used only if askomics.proxy = custom +askomics.proxy_http = http://www.example.com:3128/ +askomics.proxy_https = https://www.example.com:3129/ +askomics.proxy_username = username +askomics.proxy_password = password + +# SMTP Server +#askomics.smtp_host= +#askomics.smtp_port= +#askomics.smtp_login= +#askomics.smtp_password= +#askomics.smtp_starttls= + +### +# wsgi server configuration +### + +[server:main] +use = egg:waitress#main +; host = localhost +port = 6543 + +### +# logging configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html +### + +[loggers] +keys = root, askomics + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = DEBUG +handlers = console + +[logger_askomics] +level = DEBUG +handlers = +qualname = askomics + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s diff --git a/configs/development.mulgara.ini b/configs/development.mulgara.ini new file mode 100644 index 00000000..19105d3c --- /dev/null +++ b/configs/development.mulgara.ini @@ -0,0 +1,100 @@ +### +# app configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html +### + +[app:main] +use = egg:Askomics + +# Pyramid debug mode +pyramid.reload_templates = true +pyramid.debug_authorization = false +pyramid.debug_notfound = false +pyramid.debug_routematch = false +pyramid.default_locale_name = en +pyramid.includes = pyramid_debugtoolbar +askomics.debug = true +# debugtoolbar.hosts = 127.0.0.1 ::1 + 
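+# The endpoint below assumes a local Mulgara server listening on port 8080;
+# upload_user_data_method takes the same values the other configs use
+# ('load' or 'insert').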
+# Triplestore configuration
+askomics.endpoint = http://localhost:8080/sparql
+askomics.hack_virtuoso = false
+askomics.upload_user_data_method=load
+askomics.max_content_size_to_update_database = 20000
+askomics.graph = urn:sparql:askomics
+askomics.users_graph = urn:sparql:askomics:users
+askomics.prefix = http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#
+askomics.display_setting = http://www.irisa.fr/dyliss/rdfVisualization/display
+askomics.delete_method = DELETE
+#can be POST or DELETE
+
+# AskOmics configuration
+askomics.overview_lines_limit = 200
+askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values','text/fasta'
+askomics.upload_min_size = 1
+askomics.upload_max_size = 200000000
+askomics.files_dir = /tmp/askomics
+
+# Authentication
+askomics.salt = AskOmics
+askomics.secret = seppfmhcag
+
+# Proxy setting
+# Set askomics.proxy to:
+# - auto to use the system proxy (default)
+# - custom to use specified http, https, username and password information
+# - noproxy to use AskOmics with no proxies defined
+askomics.proxy = auto
+# These parameters are used only if askomics.proxy = custom
+askomics.proxy_http = http://www.example.com:3128/
+askomics.proxy_https = https://www.example.com:3129/
+askomics.proxy_username = username
+askomics.proxy_password = password
+
+# SMTP Server
+#askomics.smtp_host=
+#askomics.smtp_port=
+#askomics.smtp_login=
+#askomics.smtp_password=
+#askomics.smtp_starttls=
+
+###
+# wsgi server configuration
+###
+
+[server:main]
+use = egg:waitress#main
+; host = localhost
+port = 6543
+
+###
+# logging configuration
+# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html
+###
+
+[loggers]
+keys = root, askomics
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = DEBUG
+handlers = console
+
+[logger_askomics]
+level = DEBUG
+handlers =
+qualname = askomics
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s
diff --git a/configs/production.blazegraph.ini b/configs/production.blazegraph.ini
new file mode 100644
index 00000000..cf3b99a3
--- /dev/null
+++ b/configs/production.blazegraph.ini
@@ -0,0 +1,100 @@
+###
+# app configuration
+# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/environment.html
+###
+
+[app:main]
+use = egg:Askomics
+
+# Pyramid debug mode
+pyramid.reload_templates = true
+pyramid.debug_authorization = false
+pyramid.debug_notfound = false
+pyramid.debug_routematch = false
+pyramid.default_locale_name = en
+pyramid.includes = pyramid_debugtoolbar
+askomics.debug = false
+# debugtoolbar.hosts = 127.0.0.1 ::1
+# docker run --name blazegraph -p 8889:8080 lyrasis/blazegraph:2.1.4
+# Triplestore configuration
+askomics.endpoint = http://localhost:8889/bigdata/sparql
+askomics.hack_virtuoso = false
+askomics.upload_user_data_method=insert
+askomics.max_content_size_to_update_database = 4000
+askomics.graph = urn:sparql:askomics
+askomics.users_graph = urn:sparql:askomics:users
+askomics.prefix = http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#
+askomics.display_setting = http://www.irisa.fr/dyliss/rdfVisualization/display
+askomics.delete_method = DELETE
+#can be POST or DELETE
+
+# AskOmics configuration
+askomics.overview_lines_limit = 200
+askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values','text/fasta'
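+# The two size bounds below are assumed to be in bytes (200000000 is
+# roughly 200 MB).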
+askomics.upload_min_size = 1 +askomics.upload_max_size = 200000000 +askomics.files_dir = /tmp/askomics + +# Authentication +askomics.salt = AskOmics +askomics.secret = seppfmhcag + +# Proxy setting +# Set askomics.proxy to: +# - auto to use the system proxy (default) +# - custom to use specified http, https, username and password information +# - noproxy to use AskOmics with no proxies defined +askomics.proxy = auto +# These parameters are used only if askomics.proxy = custom +askomics.proxy_http = http://www.example.com:3128/ +askomics.proxy_https = https://www.example.com:3129/ +askomics.proxy_username = username +askomics.proxy_password = password + +# SMTP Server +#askomics.smtp_host= +#askomics.smtp_port= +#askomics.smtp_login= +#askomics.smtp_password= +#askomics.smtp_starttls= + +### +# wsgi server configuration +### + +[server:main] +use = egg:waitress#main +; host = localhost +port = 6543 + +### +# logging configuration +# http://docs.pylonsproject.org/projects/pyramid/en/1.5-branch/narr/logging.html +### + +[loggers] +keys = root, askomics + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = INFO +handlers = console + +[logger_askomics] +level = INFO +handlers = +qualname = askomics + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s From f1a0358707adf0b987be859d90b548a07b10b6bf Mon Sep 17 00:00:00 2001 From: Olivier Filangi Date: Fri, 20 Oct 2017 14:19:18 +0200 Subject: [PATCH 002/136] - fix allowed_upload --- askomics/static/src/js/core/IHMLocal.js | 2 +- configs/development.agraph.ini | 1 - configs/development.corese.ini | 1 - configs/development.fuseki.ini | 1 - configs/development.stardog.ini | 1 - configs/development.virtuoso.ini | 3 ++- configs/production.agraph.ini | 1 - configs/production.fuseki.ini | 1 - configs/production.virtuoso.ini | 1 - configs/test.virtuoso.ini | 1 - 10 files changed, 3 insertions(+), 10 deletions(-) diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index ecd0fa21..0c1453f2 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -452,7 +452,7 @@ class IHMLocal { let service = new RestServiceJs("get_uploaded_files"); service.getAll(function(data) { let template = AskOmics.templates.uploaded_files; - let context = {files: data.files, galaxy: data.galaxy, allowed_upload: $.parseJSON(data.allowed_upload)}; + let context = { files: data.files, galaxy: data.galaxy }; let html = template(context); $('#content_integration').empty(); $('#content_integration').append(html); diff --git a/configs/development.agraph.ini b/configs/development.agraph.ini index 9c6499c1..3bff1184 100644 --- a/configs/development.agraph.ini +++ b/configs/development.agraph.ini @@ -35,7 +35,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/development.corese.ini b/configs/development.corese.ini index a3443c9a..6b4d005a 100644 --- a/configs/development.corese.ini +++ b/configs/development.corese.ini @@ -33,7 +33,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir 
= /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics askomics.secret = seppfmhcag diff --git a/configs/development.fuseki.ini b/configs/development.fuseki.ini index 3f5862b7..845b3ed6 100644 --- a/configs/development.fuseki.ini +++ b/configs/development.fuseki.ini @@ -37,7 +37,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/development.stardog.ini b/configs/development.stardog.ini index e72663e9..af516644 100644 --- a/configs/development.stardog.ini +++ b/configs/development.stardog.ini @@ -51,7 +51,6 @@ askomics.upload_max_size = 200000000 askomics.delete_method = DELETE askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Proxy setting # Set askomics.proxy to: diff --git a/configs/development.virtuoso.ini b/configs/development.virtuoso.ini index 9a180ccc..de45d915 100644 --- a/configs/development.virtuoso.ini +++ b/configs/development.virtuoso.ini @@ -18,6 +18,8 @@ askomics.debug = true # Triplestore configuration askomics.endpoint = http://localhost:8890/sparql +askomics.updatepoint = http://localhost:8890/sparql +#askomics.endpoint = http://localhost:8000/test askomics.hack_virtuoso = true askomics.upload_user_data_method=load askomics.max_content_size_to_update_database = 4000 @@ -34,7 +36,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/production.agraph.ini b/configs/production.agraph.ini index c2b05b12..a9a47036 100644 --- a/configs/production.agraph.ini +++ b/configs/production.agraph.ini @@ -35,7 +35,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/production.fuseki.ini b/configs/production.fuseki.ini index aa05c374..b294f40e 100644 --- a/configs/production.fuseki.ini +++ b/configs/production.fuseki.ini @@ -33,7 +33,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/production.virtuoso.ini b/configs/production.virtuoso.ini index dfe8c5cf..f5cf0373 100644 --- a/configs/production.virtuoso.ini +++ b/configs/production.virtuoso.ini @@ -33,7 +33,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics -askomics.allowed_upload = true # Authentication askomics.salt = AskOmics diff --git a/configs/test.virtuoso.ini b/configs/test.virtuoso.ini index 3cd6a0d2..099d9fc8 100644 --- a/configs/test.virtuoso.ini +++ b/configs/test.virtuoso.ini @@ -33,7 +33,6 @@ askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics_test -askomics.allowed_upload = true # Authentication 
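# Note: the salt and secret below are development defaults; a real
# deployment is expected to override both values.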
askomics.salt = AskOmics From d12f3c275bd6432fe7b61eb203fb9ba44a5fd71b Mon Sep 17 00:00:00 2001 From: Olivier Filangi Date: Thu, 26 Oct 2017 14:14:44 +0200 Subject: [PATCH 003/136] implementation with federation . two case: - federation between askomics to allowed communication between IS - federation with external endpoint (EBI,...) --- askomics/ask_view.py | 7 +- askomics/libaskomics/EndpointManager.py | 189 ++++++++++++++++++ askomics/libaskomics/JobManager.py | 2 +- askomics/libaskomics/ParamManager.py | 9 + askomics/libaskomics/TripleStoreExplorer.py | 61 ++---- askomics/libaskomics/rdfdb/QueryLauncher.py | 61 +++++- .../libaskomics/rdfdb/SparqlQueryGraph.py | 126 ++++++++---- .../src/js/core/AskomicsUserAbstraction.js | 19 +- askomics/static/src/js/core/IHMLocal.js | 4 - configs/development.virtuoso.ini | 3 +- 10 files changed, 372 insertions(+), 109 deletions(-) create mode 100644 askomics/libaskomics/EndpointManager.py diff --git a/askomics/ask_view.py b/askomics/ask_view.py index c8779980..4de130d3 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -765,13 +765,8 @@ def getUserAbstraction(self): self.log.debug("== getUserAbstraction ==") body = self.request.json_body - service = '' - if 'service' in body : - service = body['service'] - tse = TripleStoreExplorer(self.settings, self.request.session) - self.data.update(tse.getUserAbstraction(service)) - + self.data.update(tse.getUserAbstraction()) return self.data #TODO : this method is too generic. The build of RDF Shortucts should be here to avoid injection with bad intention... diff --git a/askomics/libaskomics/EndpointManager.py b/askomics/libaskomics/EndpointManager.py new file mode 100644 index 00000000..0c5a10a6 --- /dev/null +++ b/askomics/libaskomics/EndpointManager.py @@ -0,0 +1,189 @@ +from askomics.libaskomics.ParamManager import ParamManager + +import logging +import sqlite3 +import platform + +class EndpointManager(ParamManager): + """ + Manage Askomics endpoint inside a sqlite database + """ + def __init__(self, settings, session): + ParamManager.__init__(self, settings, session) + self.log = logging.getLogger(__name__) + self.databasename = "endpoints.db" + self.pathdb = self.get_db_directory()+self.databasename + self.log.info(" ==> "+ self.pathdb +"<=="); + + conn = sqlite3.connect("file:"+self.pathdb,uri=True) + c = conn.cursor() + reqSql = '''CREATE TABLE IF NOT EXISTS endpoints + ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name text, + url text, + user text, + passwd text, + auth text, + askomics integer + )''' + + c.execute(reqSql) + conn.commit() + + name = 'Askomics-'+platform.node() + url = self.get_param("askomics.endpoint") + user = 'NULL' + if self.is_defined("askomics.endpoint_username") : + user = self.get_param("askomics.endpoint_username") + passwd = 'NULL' + if self.is_defined("askomics.endpoint_passwd"): + passwd = self.get_param("askomics.endpoint_passwd") + auth = 'NULL' + if self.is_defined("askomics.askomics.endpoint.auth"): + auth = self.get_param("askomics.askomics.endpoint.auth") + + reqSql ="INSERT OR IGNORE INTO endpoints (id,name,url,user,passwd,auth,askomics) "+\ + "VALUES(1,'"+name+"'," \ + +"'"+url+"'," \ + +"'"+user+"'," \ + +"'"+passwd+"'," \ + +"'"+auth+"'," \ + + "1 )" + + c.execute(reqSql) + conn.commit() + #test + reqSql = ''' + INSERT OR IGNORE INTO endpoints (id,name,url,user,passwd,auth,askomics) + VALUES(2,'Askomics-Regine','http://openstack-192-168-100-46.genouest.org/virtuoso/sparql','NULL','NULL','NULL',1 ) + ''' + + c.execute(reqSql) + conn.commit() + 
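+        # One row per known SPARQL endpoint: row id 1 is this AskOmics
+        # instance itself, and the 'Askomics-Regine' row above is a
+        # hard-coded test entry.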
conn.close() + + def saveEndpoint(self,name,url,isAskomics,user=None,passwd=None,auth=None): + + conn = sqlite3.connect(self.pathdb,uri=True) + c = conn.cursor() + + if not user: + user = 'NULL' + if not passwd: + passwd = 'NULL' + if not auth: + auth = 'NULL' + askomics = '0' + if isAskomics: + askomics = '1' + + reqSql = "INSERT INTO endpoints VALUES ("\ + + "NULL," \ + +"'"+name+"'," \ + +"'"+url+"'," \ + +"'"+user+"'," \ + +"'"+passwd+"'," \ + +"'"+auth+"'," \ + +"'"+askomics+"'" \ + + ");" + + c.execute(reqSql) + ID = c.lastrowid + + conn.commit() + conn.close() + return ID + + def updateEndpoint(self,id,name,url,isAskomics,user=None,passwd=None,auth=None): + + conn = sqlite3.connect(self.pathdb,uri=True) + c = conn.cursor() + + if not user: + user = 'NULL' + if not passwd: + passwd = 'NULL' + if not auth: + auth = 'NULL' + askomics = '0' + if isAskomics: + askomics = '1' + + reqSql = "UPDATE endpoints SET "\ + + " name = '"+ name +"'," \ + + " url = '"+ url +"'," \ + + " user = '"+ user +"'," \ + + " passwd = '"+ passwd +"'," \ + + " auth = '"+ auth +"'," \ + + " askomics = '"+ askomics +"'" \ + + " WHERE id = "+str(id) + + c.execute(reqSql) + conn.commit() + conn.close() + + + def listEndpoints(self): + data = [] + try: + conn = sqlite3.connect(self.pathdb,uri=True) + conn.row_factory = sqlite3.Row + + c = conn.cursor() + + reqSql = """ SELECT name, url, user, passwd, auth, askomics FROM endpoints""" + + c.execute(reqSql) + rows = c.fetchall() + + for row in rows: + d = {} + d['name'] = row['name'] + d['endpoint'] = row['url'] + if row['user'] != None and row['user'] != 'NULL': + d['user'] = row['user'] + if row['passwd'] != None and row['passwd'] != 'NULL' : + d['passwd'] = row['passwd'] + if row['auth'] != None and row['auth'] != 'NULL' : + d['auth'] = row['auth'] + d['askomics'] = (row['askomics'] == '1') + + data.append(d) + + except sqlite3.OperationalError as e : + self.log.info("Endpoints database does not exist .") + + + c.execute(reqSql) + conn.commit() + conn.close() + return data + + def removeJob(self,id): + conn = sqlite3.connect(self.pathdb,uri=True) + c = conn.cursor() + + reqSql = "DELETE FROM endpoints WHERE id = "+ str(id) + + try: + c.execute(reqSql) + conn.commit() + except sqlite3.OperationalError as e : + self.log.info("Jobs database does not exist .") + + conn.close() + + def drop(self): + conn = sqlite3.connect(self.pathdb,uri=True) + c = conn.cursor() + + reqSql = "DROP table endpoints;" + + try: + c.execute(reqSql) + conn.commit() + except sqlite3.OperationalError as e : + self.log.info("Jobs database does not exist .") + + conn.close() diff --git a/askomics/libaskomics/JobManager.py b/askomics/libaskomics/JobManager.py index 343920aa..e895a2cc 100644 --- a/askomics/libaskomics/JobManager.py +++ b/askomics/libaskomics/JobManager.py @@ -82,7 +82,7 @@ def updateEndSparqlJob(self,jobid,state,nr=-1, data=None, file=None): + " data = "+ d +"," \ + " file = "+ f \ + " WHERE jobID = "+str(jobid) - print(reqSql) + c.execute(reqSql) conn.commit() conn.close() diff --git a/askomics/libaskomics/ParamManager.py b/askomics/libaskomics/ParamManager.py index 70577d25..d29e3a9a 100644 --- a/askomics/libaskomics/ParamManager.py +++ b/askomics/libaskomics/ParamManager.py @@ -52,6 +52,15 @@ def __init__(self, settings, session): 'date': json.dumps } + def get_db_directory(self): + + path = self.userfilesdir+"db/" + + if not os.path.isdir(path): + os.makedirs(path) + + return path + def get_upload_directory(self): """Get the upload directory of a user, create it if not exist 
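An aside on the EndpointManager introduced above: every SQL statement is built by string concatenation, which forces the 'NULL' sentinel strings and leaves the code open to SQL injection (a concern the ask_view TODO comment already raises for shortcuts). A minimal parameterized sketch of the same INSERT, assuming the schema from this patch; the helper name is hypothetical:

import sqlite3

def save_endpoint(pathdb, name, url, user=None, passwd=None, auth=None, is_askomics=False):
    """Hypothetical parameterized variant of EndpointManager.saveEndpoint."""
    conn = sqlite3.connect(pathdb)
    try:
        cursor = conn.cursor()
        # '?' placeholders let the sqlite3 driver escape every value itself;
        # a Python None is stored as a real SQL NULL, so no 'NULL' sentinel
        # strings are needed.
        cursor.execute(
            "INSERT INTO endpoints (name, url, user, passwd, auth, askomics) "
            "VALUES (?, ?, ?, ?, ?, ?)",
            (name, url, user, passwd, auth, 1 if is_askomics else 0))
        conn.commit()
        return cursor.lastrowid
    finally:
        conn.close()

The same pattern would apply to the UPDATE and DELETE statements in the class.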
diff --git a/askomics/libaskomics/TripleStoreExplorer.py b/askomics/libaskomics/TripleStoreExplorer.py index b3547cf6..6449f75a 100644 --- a/askomics/libaskomics/TripleStoreExplorer.py +++ b/askomics/libaskomics/TripleStoreExplorer.py @@ -40,14 +40,15 @@ def get_start_points(self): sqg = SparqlQueryGraph(self.settings, self.session) ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(sqg.get_start_point().query) + results = ql.process_query(sqg.get_public_start_point().query) + results += ql.process_query(sqg.get_user_start_point().query) for result in results: g = result["g"] uri = result["nodeUri"] label = result["nodeLabel"] - if 'private' in result['accesLevel']: + if ('accesLevel' in result) and ('private' in result['accesLevel']): public = False private = True else: @@ -58,7 +59,7 @@ def get_start_points(self): return nodes - def getUserAbstraction(self,service): + def getUserAbstraction(self): """ Get the user abstraction (relation and entity as subject and object) @@ -71,11 +72,15 @@ def getUserAbstraction(self,service): sqg = SparqlQueryGraph(self.settings, self.session) ql = QueryLauncher(self.settings, self.session) - data['relations'] = ql.process_query(sqg.get_abstraction_relation('owl:ObjectProperty').query) + data['relations'] = ql.process_query(sqg.get_public_abstraction_relation('owl:ObjectProperty').query) + data['relations'] += ql.process_query(sqg.get_user_abstraction_relation('owl:ObjectProperty').query) data['subclassof'] = ql.process_query(sqg.get_isa_relation_entities().query) - data['entities'] = ql.process_query(sqg.get_abstraction_entity().query) - data['attributes'] = ql.process_query(sqg.get_abstraction_attribute_entity().query) - data['categories'] = ql.process_query(sqg.get_abstraction_category_entity().query) + data['entities'] = ql.process_query(sqg.get_public_abstraction_entity().query) + data['entities'] += ql.process_query(sqg.get_user_abstraction_entity().query) + data['attributes'] = ql.process_query(sqg.get_public_abstraction_attribute_entity().query) + data['attributes'] += ql.process_query(sqg.get_user_abstraction_attribute_entity().query) + data['categories'] = ql.process_query(sqg.get_public_abstraction_category_entity().query) + data['categories'] += ql.process_query(sqg.get_user_abstraction_category_entity().query) data['positionable'] = ql.process_query(sqg.get_abstraction_positionable_entity().query) data['graph'] = sqg.getGraphUser() @@ -119,59 +124,19 @@ def build_sparql_query_from_json(self, fromgraphs, variates, constraintes_relati select = ' '.join(variates) sqb = SparqlQueryBuilder(self.settings, self.session) - query_launcher = QueryLauncher(self.settings, self.session) query = self.build_recursive_block('', constraintes_relations) # if limit != None and limit > 0: # query += ' LIMIT ' + str(limit) if send_request_to_tps: + query_launcher = QueryLauncher(self.settings, self.session,federationRequest=True,external_lendpoints=[]) results = query_launcher.process_query(sqb.custom_query(fromgraphs, select, query).query) else: results = [] return results, sqb.custom_query(fromgraphs, select, query).query - #FIXME: DEAD CODE ?? 
- def build_sparql_query_from_json2(self, variates, constraintes_relations, limit, send_request_to_TPS): - """ - build a sparql query from json - """ - self.log.debug("variates") - self.log.debug(variates) - self.log.debug("constraintes_relations") - self.log.debug(constraintes_relations) - - sqb = SparqlQueryBuilder(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - - req = "" - req += "SELECT DISTINCT "+' '.join(variates)+"\n" - #TODO OFI: External Service do not work and, anyway, graphes have to be selectionned by the user in the UI - # - #for graph in namedGraphs: - # req += "FROM "+ "<"+graph+ ">"+"\n" - req += "WHERE \n" - req += self.build_recursive_block('', constraintes_relations) - if limit != None and limit >0 : - req +=" LIMIT "+str(limit) - - - sqb = SparqlQueryBuilder(self.settings, self.session) - prefixes = sqb.header_sparql_config(req) - query = prefixes+req - - results = {} - - if send_request_to_TPS: - ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(query) - else: - # add comment inside query to inform user - query = "# endpoint = "+self.get_param("askomics.endpoint") + "\n" + query - - return results, query - def get_prefix_uri(self): sqg = SparqlQueryGraph(self.settings, self.session) ql = QueryLauncher(self.settings, self.session) diff --git a/askomics/libaskomics/rdfdb/QueryLauncher.py b/askomics/libaskomics/rdfdb/QueryLauncher.py index f5d4a752..1a12c9e0 100755 --- a/askomics/libaskomics/rdfdb/QueryLauncher.py +++ b/askomics/libaskomics/rdfdb/QueryLauncher.py @@ -10,6 +10,7 @@ import urllib.request from askomics.libaskomics.ParamManager import ParamManager +from askomics.libaskomics.EndpointManager import EndpointManager class SPARQLError(RuntimeError): """ @@ -28,10 +29,38 @@ class QueryLauncher(ParamManager): from these preformated results using a ResultsBuilder instance. """ - def __init__(self, settings, session): + def __init__(self, settings, session,federationRequest=False,external_lendpoints=None): ParamManager.__init__(self, settings, session) - self.log = logging.getLogger(__name__) + #comments added in sparql request to get all url endpoint. + self.commentsForFed="" + em = EndpointManager(settings, session) + self.lendpoints = [] + + if federationRequest: + lendpoints = external_lendpoints + em.listEndpoints() + + if len(lendpoints)==0 : + raise Exception("None endpoint are defined.") + + if len(lendpoints)==1 : + self.lendpoints = lendpoints + # no need federation + return + i=0 + for endp in lendpoints: + i+=1 + #self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' + self.commentsForFed+="#endpoint,"+str(i)+','+endp['endpoint']+',false\n' + + d = {} + d['name'] = 'Federation request' + if not self.is_defined("askomics.fdendpoint") : + raise Exception("can not find askomics.fdendpoint property in the config file !") + d['endpoint'] = self.get_param("askomics.fdendpoint") + self.lendpoints.append(d) + else: + self.lendpoints = em.listEndpoints() def setup_opener(self, proxy_config): """ @@ -76,6 +105,10 @@ def execute_query(self, query, log_raw_results=True, externalService=None): if you're doing a select and parsing the results with parse_results. 
""" + #query = "#endpoint,askomics,http://localhost:8890/sparql/,false\n"+\ + # "#endpoint,regine,http://openstack-192-168-100-46.genouest.org/virtuoso/sparql,false\n"+\ + # query + # Proxy handling if self.is_defined("askomics.proxy"): proxy_config = self.get_param("askomics.proxy") @@ -89,11 +122,13 @@ def execute_query(self, query, log_raw_results=True, externalService=None): # if not line.startswith('PREFIX ')) self.log.debug("----------- QUERY --------------\n%s", query_log) + time0 = time.time() + if externalService is None : urlupdate = None if self.is_defined("askomics.updatepoint"): urlupdate = self.get_param("askomics.updatepoint") - time0 = time.time() + if self.is_defined("askomics.endpoint"): data_endpoint = SPARQLWrapper(self.get_param("askomics.endpoint"), urlupdate) else: @@ -111,7 +146,15 @@ def execute_query(self, query, log_raw_results=True, externalService=None): if self.is_defined("askomics.endpoint.auth"): data_endpoint.setHTTPAuth(self.get_param("askomics.endpoint.auth")) # Basic or Digest else: - data_endpoint = externalService + urlupdate = None + if 'updatepoint' in externalService: + data_endpoint = SPARQLWrapper(externalService['endpoint'], urlupdate) + else: + data_endpoint = SPARQLWrapper(externalService['endpoint']) + if ('user' in externalService) and ('passwd' in externalService): + data_endpoint.setCredentials(externalService['user'], externalService['passwd']) + if 'auth' in externalService: + data_endpoint.setHTTPAuth(externalService['auth']); data_endpoint.setQuery(query) data_endpoint.method = 'POST' @@ -184,11 +227,15 @@ def process_query(self, query): ''' Execute query and parse the results if exist ''' - json_query = self.execute_query(query, log_raw_results=False) - results = self.parse_results(json_query) - return results + results = [] + query = self.commentsForFed + query + for es in self.lendpoints: + json_query = self.execute_query(query, externalService=es, log_raw_results=False) + results += self.parse_results(json_query) + + return results def format_results_csv(self, data, headers): """write the csv result file from a data ist diff --git a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py index 7f040a16..edb22e1a 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py @@ -35,25 +35,36 @@ def query_exemple(self): 'query': '?s ?p ?o .' 
}) - def get_start_point(self): + def get_public_start_point(self): """ Get the start point and in which graph they are """ self.log.debug('---> get_start_point') + + return self.build_query_on_the_fly({ + 'select': '?g ?nodeUri ?nodeLabel', + 'query': 'GRAPH ?g {\n'+ + '\t?nodeUri displaySetting:entity "true"^^xsd:boolean .\n' + + '\t?nodeUri displaySetting:startPoint "true"^^xsd:boolean .\n' + + '\t?nodeUri rdfs:label ?nodeLabel.\n'+ + "\t?g :accessLevel 'public'.\n"+ + "}" + }, True) + + def get_user_start_point(self): + """ + Get the start point and in which graph they are + """ + self.log.debug('---> get_start_point') + return self.build_query_on_the_fly({ 'select': '?g ?nodeUri ?nodeLabel ?accesLevel', 'query': 'GRAPH ?g {\n'+ '\t?nodeUri displaySetting:entity "true"^^xsd:boolean .\n' + '\t?nodeUri displaySetting:startPoint "true"^^xsd:boolean .\n' + '\t?nodeUri rdfs:label ?nodeLabel.\n'+ - "\t{\n"+ - "\t\t{ ?g :accessLevel ?accesLevel.\n"+ - "\t\t\tVALUES ?accesLevel { 'public' }."+ - "\t\t}\n"+ - "\t\tUNION\n"+ - "\t\t{ ?g :accessLevel ?accesLevel.\n "+ - "\t\t?g dc:creator '" + self.session['username'] + "' }\n"+ - "\t}\n."+ + "\t?g :accessLevel ?accesLevel.\n "+ + "\t?g dc:creator '" + self.session['username'] + "'\n"+ "}" }, True) @@ -70,7 +81,7 @@ def get_prefix_uri(self): '\t?nodeUri displaySetting:prefixUri ?prefUri.\n'+ "\t{\n"+ "\t\t{ ?g :accessLevel ?accesLevel.\n"+ - "\t\t\tVALUES ?accesLevel { 'public' }."+ + "\t\t\tFILTER ( ?accesLevel = 'public' )."+ "\t\t}\n"+ "\t\tUNION\n"+ "\t\t{ ?g :accessLevel ?accesLevel.\n "+ @@ -163,7 +174,7 @@ def get_all_taxons(self): '}' }, True) - def get_abstraction_attribute_entity(self): + def get_public_abstraction_attribute_entity(self): """ Get all attributes of an entity """ @@ -177,15 +188,29 @@ def get_abstraction_attribute_entity(self): '\t rdfs:domain ?entity ;\n' + '\t rdfs:range ?typeAttribute .\n\n' + '\tOPTIONAL {?attribute displaySetting:attributeOrder ?order .}\n' + - '\t}'+ - '\t{'+ - '\t\t{ ?g :accessLevel "public". }'+ - '\t\tUNION '+ - '\t\t{ ?g dc:creator "'+self.session['username']+'".}'+ + '\t?g :accessLevel "public". '+ + '}' + }, True) + + def get_user_abstraction_attribute_entity(self): + """ + Get all attributes of an entity + """ + return self.build_query_on_the_fly({ + 'select': '?g ?entity ?attribute ?labelAttribute ?typeAttribute ?order', + 'query': 'Graph ?g {\n' + + '\t?entity displaySetting:entity "true"^^xsd:boolean .\n\n' + + '\t?attribute displaySetting:attribute "true"^^xsd:boolean .\n\n' + + '\t?attribute rdf:type owl:DatatypeProperty ;\n' + + '\t rdfs:label ?labelAttribute ;\n' + + '\t rdfs:domain ?entity ;\n' + + '\t rdfs:range ?typeAttribute .\n\n' + + '\tOPTIONAL {?attribute displaySetting:attributeOrder ?order .}\n' + + '\t?g dc:creator "'+self.session['username']+'".'+ '}' }, True) - def get_abstraction_relation(self, prop): + def get_public_abstraction_relation(self, prop): """ Get the relation of an entity """ @@ -195,16 +220,25 @@ def get_abstraction_relation(self, prop): '\t rdfs:domain ?subject ;\n' + '\t rdfs:range ?object .\n'+ '\t?subject displaySetting:entity "true"^^xsd:boolean .\n\n' + - '\t{'+ - '\t\t{ ?g :accessLevel "public". }'+ - '\t\tUNION '+ - '\t\t{?g dc:creator "'+self.session['username']+'" .}'+ - '\t}'+ + '\t?g :accessLevel "public". 
'+ '}' }, True) + def get_user_abstraction_relation(self, prop): + """ + Get the relation of an entity + """ + return self.build_query_on_the_fly({ + 'select': '?g ?d ?subject ?relation ?object', + 'query': 'GRAPH ?g { ?relation rdf:type ' + prop + ' ;\n' + + '\t rdfs:domain ?subject ;\n' + + '\t rdfs:range ?object .\n'+ + '\t?subject displaySetting:entity "true"^^xsd:boolean .\n\n' + + '\t?g dc:creator "'+self.session['username']+'" .'+ + '}' + }, True) - def get_abstraction_entity(self): + def get_public_abstraction_entity(self): """ Get theproperty of an entity """ @@ -212,11 +246,19 @@ def get_abstraction_entity(self): 'select': '?g ?entity ?property ?value', 'query': 'GRAPH ?g { ?entity ?property ?value .\n' + '\t?entity displaySetting:entity "true"^^xsd:boolean .\n' + - '\t{'+ - '\t\t{ ?g :accessLevel "public". }'+ - '\t\tUNION '+ - '\t\t{?g dc:creator "'+self.session['username']+'" .}'+ - '\t}'+ + '\t?g :accessLevel "public".'+ + '}' + }, True) + + def get_user_abstraction_entity(self): + """ + Get theproperty of an entity + """ + return self.build_query_on_the_fly({ + 'select': '?g ?entity ?property ?value', + 'query': 'GRAPH ?g { ?entity ?property ?value .\n' + + '\t?entity displaySetting:entity "true"^^xsd:boolean .\n' + + '\t?g dc:creator "'+self.session['username']+'" .'+ '}' }, True) @@ -230,7 +272,7 @@ def get_abstraction_positionable_entity(self): '?entity displaySetting:is_positionable "true"^^xsd:boolean .}' }, True) - def get_abstraction_category_entity(self): + def get_public_abstraction_category_entity(self): """ Get the category of an entity """ @@ -243,12 +285,26 @@ def get_abstraction_category_entity(self): '\t rdfs:domain ?entity;\n' + '\t rdfs:range ?typeCategory.\n' + '\tOPTIONAL {?category displaySetting:attributeOrder ?order .}\n' + - '\t?typeCategory displaySetting:category [] .\n' + - '\t}'+ - '\t{'+ - '\t\t{ ?g :accessLevel "public". }'+ - '\t\tUNION '+ - '\t\t{?g dc:creator "'+self.session['username']+'" .}'+ + '\t?typeCategory displaySetting:category ?catStuff .\n' + + '\t?g :accessLevel "public".'+ + '\t}' + }, True) + + def get_user_abstraction_category_entity(self): + """ + Get the category of an entity + """ + return self.build_query_on_the_fly({ + 'select': '?g ?entity ?category ?labelCategory ?typeCategory ?order', + 'query': 'GRAPH ?g { \n'+ + '\t?entity displaySetting:entity "true"^^xsd:boolean .\n' + + '\t?category rdf:type owl:ObjectProperty ;\n' + + '\t rdfs:label ?labelCategory ;\n' + + '\t rdfs:domain ?entity;\n' + + '\t rdfs:range ?typeCategory.\n' + + '\tOPTIONAL {?category displaySetting:attributeOrder ?order .}\n' + + '\t?typeCategory displaySetting:category ?catStuff .\n' + + '\t?g dc:creator "'+self.session['username']+'" .'+ '\t}' }, True) diff --git a/askomics/static/src/js/core/AskomicsUserAbstraction.js b/askomics/static/src/js/core/AskomicsUserAbstraction.js index 9c2ee5d5..1b163a75 100644 --- a/askomics/static/src/js/core/AskomicsUserAbstraction.js +++ b/askomics/static/src/js/core/AskomicsUserAbstraction.js @@ -47,15 +47,19 @@ class AskomicsUserAbstraction { } getEntities() { + let listE = {} ; + for (let g in this.entityInformationList ) { if ( this.isDesactivedGraph(g) ) continue; + for (let e in this.entityInformationList[g]) { if (! 
(e in listE ) ) { listE[e]=0; } } } + return JSON.parse(JSON.stringify(listE)) ; } @@ -168,10 +172,9 @@ class AskomicsUserAbstraction { iua.entityInformationList[graph][uri][rel] = val; } - //console.log("entityInformationList:"+JSON.stringify(iua.entityInformationList)); for (let entry2 in resultListTripletSubjectRelationObject.attributes){ - console.log("ATTRIBUTE:"+JSON.stringify(resultListTripletSubjectRelationObject.attributes[entry2])); + //console.log("ATTRIBUTE:"+JSON.stringify(resultListTripletSubjectRelationObject.attributes[entry2])); let graph = resultListTripletSubjectRelationObject.attributes[entry2].g; let uri2 = resultListTripletSubjectRelationObject.attributes[entry2].entity; let attribute = {}; @@ -233,7 +236,7 @@ class AskomicsUserAbstraction { } iua.entitySubclassof[duo.uri].push(duo.urisub); } - console.log(JSON.stringify(iua.entitySubclassof)); + //console.log(JSON.stringify(iua.entitySubclassof)); //console.log("=================== attributesEntityList ========================="); //console.log(JSON.stringify(iua.attributesEntityList)); }); @@ -243,7 +246,7 @@ class AskomicsUserAbstraction { let iua = this; if (ns in this.prefix_error) { - console.log("erreur........................."); + //console.log("erreur........................."); return ns; } @@ -339,16 +342,17 @@ class AskomicsUserAbstraction { */ getRelationsObjectsAndSubjectsWithURI(UriSelectedNode) { - let objectsTarget = {} ; let subjectsTarget = {} ; let lentities = this.getEntities(); for (let i in this.tripletSubjectRelationObject) { - if (this.isDesactivedGraph(this.tripletSubjectRelationObject[i].g) ) continue; - + if (this.isDesactivedGraph(this.tripletSubjectRelationObject[i].g) ) { + continue; + } if ( this.tripletSubjectRelationObject[i].object == UriSelectedNode ) { + /* check if graph is not removed */ if ( !(this.tripletSubjectRelationObject[i].subject in lentities) ) continue; @@ -361,6 +365,7 @@ class AskomicsUserAbstraction { } } if ( this.tripletSubjectRelationObject[i].subject == UriSelectedNode ) { + /* check if graph is not removed */ if ( !(this.tripletSubjectRelationObject[i].object in lentities) ) continue; diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index 0c1453f2..72159669 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -259,8 +259,6 @@ class IHMLocal { // Sort inputs let inputs = $("#spdiv"); inputs.children().detach().sort(function(a, b) { - console.log("a:"+a); - console.log("a:"+b); return $(a).attr("id").localeCompare($(b).attr("id")); }).appendTo(inputs); @@ -614,8 +612,6 @@ class IHMLocal { // get the file's content $("#spinner_galaxy-upload").removeClass("hidden"); let dataset = $('input[name=upload-galaxy]:checked').val(); - console.log('gid'); - console.log(dataset); let service2 = new RestServiceJs('get_galaxy_file_content'); let model = {'dataset': dataset}; service2.post(model, function(data) { diff --git a/configs/development.virtuoso.ini b/configs/development.virtuoso.ini index de45d915..1ef8b31b 100644 --- a/configs/development.virtuoso.ini +++ b/configs/development.virtuoso.ini @@ -19,7 +19,8 @@ askomics.debug = true # Triplestore configuration askomics.endpoint = http://localhost:8890/sparql askomics.updatepoint = http://localhost:8890/sparql -#askomics.endpoint = http://localhost:8000/test +#askomics.endpoint = http://localhost:4040/test +askomics.fdendpoint=http://localhost:4040/test askomics.hack_virtuoso = true askomics.upload_user_data_method=load 
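+# askomics.fdendpoint above is assumed to point at a SPARQL federation
+# engine; QueryLauncher prepends one comment line per remote endpoint to
+# each query it sends there, in the form:
+#   #endpoint,<name>,<url>,false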
askomics.max_content_size_to_update_database = 4000 From a4c0be96fcec7ca2c6aa5b1f2c95216e5035d616 Mon Sep 17 00:00:00 2001 From: Olivier Filangi Date: Mon, 30 Oct 2017 13:18:12 +0100 Subject: [PATCH 004/136] user interface to defined askomics endpoint --- askomics/__init__.py | 4 + askomics/ask_view.py | 82 +++++++++- askomics/libaskomics/EndpointManager.py | 144 ++++++++++-------- askomics/libaskomics/rdfdb/QueryLauncher.py | 46 ++++-- askomics/static/src/js/core/IHMLocal.js | 97 +++++++++++- .../src/templates/handlebars/add_endpoint.hbs | 19 +++ .../src/templates/handlebars/endpoints.hbs | 40 +++++ .../src/templates/handlebars/navbar.hbs | 1 + askomics/static/src/templates/index.pt | 3 + 9 files changed, 353 insertions(+), 83 deletions(-) create mode 100644 askomics/static/src/templates/handlebars/add_endpoint.hbs create mode 100644 askomics/static/src/templates/handlebars/endpoints.hbs diff --git a/askomics/__init__.py b/askomics/__init__.py index ffd87424..d6b85c5b 100644 --- a/askomics/__init__.py +++ b/askomics/__init__.py @@ -29,6 +29,10 @@ def main(global_config, **settings): config.add_route('empty_user_database', '/empty_user_database') config.add_route('list_user_graph', '/list_user_graph') config.add_route('delete_graph', '/delete_graph') + config.add_route('list_endpoints', '/list_endpoints') + config.add_route('delete_endpoints', '/delete_endpoints') + config.add_route('add_endpoint', '/add_endpoint') + config.add_route('enable_endpoints', '/enable_endpoints') config.add_route('getUserAbstraction', '/userAbstraction') config.add_route('sparqlquery', '/sparqlquery') config.add_route('getSparqlQueryInTextFormat', '/getSparqlQueryInTextFormat') diff --git a/askomics/ask_view.py b/askomics/ask_view.py index 4de130d3..2aa71988 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -20,13 +20,18 @@ from askomics.libaskomics.ParamManager import ParamManager from askomics.libaskomics.ModulesManager import ModulesManager from askomics.libaskomics.JobManager import JobManager +from askomics.libaskomics.EndpointManager import EndpointManager + from askomics.libaskomics.TripleStoreExplorer import TripleStoreExplorer from askomics.libaskomics.SourceFileConvertor import SourceFileConvertor + from askomics.libaskomics.rdfdb.SparqlQueryBuilder import SparqlQueryBuilder from askomics.libaskomics.rdfdb.SparqlQueryGraph import SparqlQueryGraph from askomics.libaskomics.rdfdb.SparqlQueryStats import SparqlQueryStats from askomics.libaskomics.rdfdb.SparqlQueryAuth import SparqlQueryAuth + from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher + from askomics.libaskomics.source_file.SourceFile import SourceFile from askomics.libaskomics.GalaxyConnector import GalaxyConnector @@ -290,7 +295,7 @@ def empty_database(self): @view_config(route_name='delete_graph', request_method='POST') def delete_graph(self): """ - Delete selected named graphs and their metadatas + """ self.checkAuthSession() @@ -308,6 +313,68 @@ def delete_graph(self): #delete metadatas ql.execute_query(sqb.get_delete_metadatas_of_graph(graph).query) + @view_config(route_name='delete_endpoints', request_method='POST') + def delete_endpoints(self): + """ + + """ + + self.checkAuthSession() + + if 'endpoints' not in self.request.json_body: + raise exc.exception_response(404) + + em = EndpointManager(self.settings, self.request.session) + + for id in self.request.json_body['endpoints']: + em.remove(id) + + @view_config(route_name='add_endpoint', request_method='POST') + def add_endpoint(self): + """ + + """ + + 
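+        # Expected JSON body: {"name": ..., "url": ..., "auth": ...};
+        # the new endpoint is stored in the local endpoints database.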
self.checkAuthSession() + + if 'name' not in self.request.json_body: + raise exc.exception_response(404) + if 'url' not in self.request.json_body: + raise exc.exception_response(404) + if 'auth' not in self.request.json_body: + raise exc.exception_response(404) + + name = self.request.json_body['name'] + url = self.request.json_body['url'] + auth = self.request.json_body['auth'] + + em = EndpointManager(self.settings, self.request.session) + em.saveEndpoint(name,url,auth,True) + + @view_config(route_name='enable_endpoints', request_method='POST') + def enable_endpoints(self): + """ + + """ + + self.checkAuthSession() + + if 'id' not in self.request.json_body: + raise exc.exception_response(404) + if 'enable' not in self.request.json_body: + raise exc.exception_response(404) + + id = self.request.json_body['id'] + enable = self.request.json_body['enable'] + + em = EndpointManager(self.settings, self.request.session) + + if enable: + em.enable(id) + else: + em.disable(id,"") + + @view_config(route_name='list_user_graph', request_method='GET') def list_user_graph(self): """ @@ -346,6 +413,19 @@ def list_user_graph(self): return named_graphs + @view_config(route_name='list_endpoints', request_method='GET') + def list_endpoints(self): + """ + Return a list with all the named graphs of a user. + """ + + self.checkAuthSession() + + em = EndpointManager(self.settings, self.request.session) + + return em.listEndpoints() + + @view_config(route_name='guess_csv_header_type', request_method='POST') def guess_csv_header_type(self): """Guess the headers type of a csv file diff --git a/askomics/libaskomics/EndpointManager.py b/askomics/libaskomics/EndpointManager.py index 0c5a10a6..537ba9a0 100644 --- a/askomics/libaskomics/EndpointManager.py +++ b/askomics/libaskomics/EndpointManager.py @@ -22,70 +22,42 @@ def __init__(self, settings, session): id INTEGER PRIMARY KEY AUTOINCREMENT, name text, url text, - user text, - passwd text, auth text, - askomics integer + enable integer, + message text )''' c.execute(reqSql) conn.commit() - name = 'Askomics-'+platform.node() - url = self.get_param("askomics.endpoint") - user = 'NULL' - if self.is_defined("askomics.endpoint_username") : - user = self.get_param("askomics.endpoint_username") - passwd = 'NULL' - if self.is_defined("askomics.endpoint_passwd"): - passwd = self.get_param("askomics.endpoint_passwd") - auth = 'NULL' - if self.is_defined("askomics.askomics.endpoint.auth"): - auth = self.get_param("askomics.askomics.endpoint.auth") - - reqSql ="INSERT OR IGNORE INTO endpoints (id,name,url,user,passwd,auth,askomics) "+\ - "VALUES(1,'"+name+"'," \ - +"'"+url+"'," \ - +"'"+user+"'," \ - +"'"+passwd+"'," \ - +"'"+auth+"'," \ - + "1 )" - - c.execute(reqSql) - conn.commit() #test - reqSql = ''' - INSERT OR IGNORE INTO endpoints (id,name,url,user,passwd,auth,askomics) - VALUES(2,'Askomics-Regine','http://openstack-192-168-100-46.genouest.org/virtuoso/sparql','NULL','NULL','NULL',1 ) - ''' + #reqSql = ''' + #INSERT OR IGNORE INTO endpoints (id,name,url,user,passwd,auth,askomics) + #VALUES(2,'Askomics-Regine','http://openstack-192-168-100-46.genouest.org/virtuoso/sparql','NULL','NULL','NULL',1 ) + #''' - c.execute(reqSql) - conn.commit() + #c.execute(reqSql) + #conn.commit() conn.close() - def saveEndpoint(self,name,url,isAskomics,user=None,passwd=None,auth=None): + def saveEndpoint(self,name,url,auth,isenable): conn = sqlite3.connect(self.pathdb,uri=True) c = conn.cursor() - if not user: - user = 'NULL' - if not passwd: - passwd = 'NULL' if not auth: auth = 'NULL' - 
askomics = '0' - if isAskomics: - askomics = '1' + enable = '0' + if isenable: + enable = '1' reqSql = "INSERT INTO endpoints VALUES ("\ + "NULL," \ +"'"+name+"'," \ +"'"+url+"'," \ - +"'"+user+"'," \ - +"'"+passwd+"'," \ +"'"+auth+"'," \ - +"'"+askomics+"'" \ + + enable +","\ + + "''" \ + ");" c.execute(reqSql) @@ -95,36 +67,38 @@ def saveEndpoint(self,name,url,isAskomics,user=None,passwd=None,auth=None): conn.close() return ID - def updateEndpoint(self,id,name,url,isAskomics,user=None,passwd=None,auth=None): + def enable(self,id): conn = sqlite3.connect(self.pathdb,uri=True) c = conn.cursor() - if not user: - user = 'NULL' - if not passwd: - passwd = 'NULL' - if not auth: - auth = 'NULL' - askomics = '0' - if isAskomics: - askomics = '1' - reqSql = "UPDATE endpoints SET "\ - + " name = '"+ name +"'," \ - + " url = '"+ url +"'," \ - + " user = '"+ user +"'," \ - + " passwd = '"+ passwd +"'," \ - + " auth = '"+ auth +"'," \ - + " askomics = '"+ askomics +"'" \ + + " enable = 1 ," \ + + " message = '' " \ + " WHERE id = "+str(id) c.execute(reqSql) conn.commit() conn.close() + self.listEndpoints() + + def disable(self,id,message): + conn = sqlite3.connect(self.pathdb,uri=True) + c = conn.cursor() + + reqSql = "UPDATE endpoints SET "\ + + " enable = 0 , " \ + + " message = '"+message+"' " \ + + " WHERE id = "+str(id) + print(reqSql) + c.execute(reqSql) + conn.commit() + conn.close() + self.listEndpoints() def listEndpoints(self): + self.log.info(" == listEndpoints == ") data = [] try: conn = sqlite3.connect(self.pathdb,uri=True) @@ -132,22 +106,59 @@ def listEndpoints(self): c = conn.cursor() - reqSql = """ SELECT name, url, user, passwd, auth, askomics FROM endpoints""" + reqSql = """SELECT id, name, url, auth, enable, message FROM endpoints""" c.execute(reqSql) rows = c.fetchall() + self.log.info("nb row:"+str(len(rows))) + for row in rows: + + d = {} + d['id'] = row['id'] + d['name'] = row['name'] + d['endpoint'] = row['url'] + if row['auth'] != None and row['auth'] != 'NULL' : + d['auth'] = row['auth'] + else: + d['auth'] = '' + d['enable'] = (row['enable'] == 1) + d['message'] = row['message'] + data.append(d) + + except sqlite3.OperationalError as e : + self.log.info("Endpoints database does not exist .") + + c.execute(reqSql) + conn.commit() + conn.close() + return data + + def listActiveEndpoints(self): + data = [] + try: + conn = sqlite3.connect(self.pathdb,uri=True) + conn.row_factory = sqlite3.Row + + c = conn.cursor() + + reqSql = """SELECT id, name, url, auth, enable, message FROM endpoints WHERE enable == 1 """ + + c.execute(reqSql) + rows = c.fetchall() + self.log.info("nb row:"+str(len(rows))) for row in rows: + d = {} + d['id'] = row['id'] d['name'] = row['name'] d['endpoint'] = row['url'] - if row['user'] != None and row['user'] != 'NULL': - d['user'] = row['user'] - if row['passwd'] != None and row['passwd'] != 'NULL' : - d['passwd'] = row['passwd'] if row['auth'] != None and row['auth'] != 'NULL' : d['auth'] = row['auth'] - d['askomics'] = (row['askomics'] == '1') + else: + d['auth'] = '' + d['enable'] = (row['enable'] == 1) + d['message'] = row['message'] data.append(d) @@ -160,7 +171,8 @@ def listEndpoints(self): conn.close() return data - def removeJob(self,id): + + def remove(self,id): conn = sqlite3.connect(self.pathdb,uri=True) c = conn.cursor() diff --git a/askomics/libaskomics/rdfdb/QueryLauncher.py b/askomics/libaskomics/rdfdb/QueryLauncher.py index 1a12c9e0..a0b75f8e 100755 --- a/askomics/libaskomics/rdfdb/QueryLauncher.py +++ 
b/askomics/libaskomics/rdfdb/QueryLauncher.py @@ -38,29 +38,32 @@ def __init__(self, settings, session,federationRequest=False,external_lendpoints self.lendpoints = [] if federationRequest: - lendpoints = external_lendpoints + em.listEndpoints() - + lendpoints = [] + if external_lendpoints: + lendpoints = external_lendpoints + lendpoints += em.listActiveEndpoints() if len(lendpoints)==0 : - raise Exception("None endpoint are defined.") - - if len(lendpoints)==1 : - self.lendpoints = lendpoints # no need federation return i=0 for endp in lendpoints: i+=1 #self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' - self.commentsForFed+="#endpoint,"+str(i)+','+endp['endpoint']+',false\n' + self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' + #add local TPS + self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n' d = {} d['name'] = 'Federation request' if not self.is_defined("askomics.fdendpoint") : raise Exception("can not find askomics.fdendpoint property in the config file !") d['endpoint'] = self.get_param("askomics.fdendpoint") + d['enable'] = True + d['id'] = 0 + d['auth'] = 'basic' self.lendpoints.append(d) else: - self.lendpoints = em.listEndpoints() + self.lendpoints = em.listActiveEndpoints() def setup_opener(self, proxy_config): """ @@ -146,13 +149,16 @@ def execute_query(self, query, log_raw_results=True, externalService=None): if self.is_defined("askomics.endpoint.auth"): data_endpoint.setHTTPAuth(self.get_param("askomics.endpoint.auth")) # Basic or Digest else: + if not externalService['enable'] : + self.log.debug("externalService "+externalService['name']+'('+externalService['endpoint']+') is disabled.') + return [] + urlupdate = None if 'updatepoint' in externalService: data_endpoint = SPARQLWrapper(externalService['endpoint'], urlupdate) else: data_endpoint = SPARQLWrapper(externalService['endpoint']) - if ('user' in externalService) and ('passwd' in externalService): - data_endpoint.setCredentials(externalService['user'], externalService['passwd']) + if 'auth' in externalService: data_endpoint.setHTTPAuth(externalService['auth']); @@ -177,7 +183,12 @@ def execute_query(self, query, log_raw_results=True, externalService=None): try: results = data_endpoint.query().convert() except urllib.error.URLError as URLError: - raise ValueError(URLError.reason) + #url error, we disable the endpoint + #raise ValueError(URLError.reason) + em = EndpointManager(self.settings, self.session) + em.disable(externalService['id'],str(URLError.reason)) + results = [] + #raise ValueError(URLError.reason) time1 = time.time() @@ -228,9 +239,18 @@ def process_query(self, query): Execute query and parse the results if exist ''' - results = [] - query = self.commentsForFed + query + # Federation Request case + #------------------------------------------------------ + if self.commentsForFed != '': + query = self.commentsForFed + query + es = self.lendpoints[0] + json_query = self.execute_query(query, externalService=es, log_raw_results=False) + return self.parse_results(json_query) + # call main usener dpoint askomics + json_query = self.execute_query(query, log_raw_results=False) + results = self.parse_results(json_query) + # then other askomics endpoint defined by the user for es in self.lendpoints: json_query = self.execute_query(query, externalService=es, log_raw_results=False) results += self.parse_results(json_query) diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index 
72159669..ceaac68b 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -319,8 +319,7 @@ class IHMLocal { let service = new RestServiceJs('list_user_graph'); service.getAll(function(data) { let template = AskOmics.templates.datasets; - for (var i = data.length - 1; i >= 0; i--) { - } + let context = {datasets: data}; let html = template(context); $('#content_datasets').empty(); @@ -369,6 +368,97 @@ class IHMLocal { }); } + loadEndpoints() { + let service = new RestServiceJs('list_endpoints'); + service.getAll(function(data) { + let template = AskOmics.templates.endpoints; + let context = {endpoints: data}; + let html = template(context); + + $('#content_endpoints').empty(); + $('#content_endpoints').append(html); + + // hide delete button if no checkbox checked + + $(".check_ep").change(function(){ + if ($('.check_endpoint:checked').length !== 0) { + $('#delete_endpoints').removeAttr('disabled'); + }else{ + $('#delete_endpoints').attr('disabled', 'disabled'); + } + }); + + // Delete selected datasets + $('#delete_endpoints').click(function() { + let selected = []; + $('.check_endpoint').each(function() { + if ($(this).is(':checked')) {selected.push($(this).attr('name'));} + }); + let service = new RestServiceJs('delete_endpoints'); + let model = {'endpoints': selected}; + //show the spinner + $('#spinner_delete').removeClass('hidden'); + service.post(model, function(data) { + __ihm.loadEndpoints(); + __ihm.stopSession(); + __ihm.resetStats(); + }); + }); + + // sorted dataTable + $('#data-table-endpoints').DataTable({ + 'order': [[1, 'asc']], + 'columnDefs': [ + { 'orderable': false, 'targets': 0 }, + { type: 'date-euro', targets: 2 } + ] + }); + $('#add_endpoint').click(function() { + __ihm.get_add_endpoints_form(); + }); + + $('input.enable-endpoint').click(function() { + let service = new RestServiceJs('enable_endpoints'); + let model = { + 'id': $(this).closest( "tr" ).attr('id'), + 'enable' : $(this).is(":checked") + }; + service.post(model, function() { + }); + }); + }); + } + + get_add_endpoints_form() { + console.log(" +++ get_add_endpoints_form +++"); + + $('#modalTitle').text('Add Askomics endpoint'); + $('.modal-sm').css('width', '55%'); + $('.modal-body').show(); + + $('#modal').modal('show'); + $('#modal').addClass('upload-modal'); + + let template = AskOmics.templates.add_endpoint; + let html = template(); + + $('#modalMessage').html(html ); + + $('#modalButton').click(function() + { + let service = new RestServiceJs('add_endpoint'); + let model = { + name: $('#endpoint-name').val(), + url:$('#endpoint-url').val(), + auth: $('#endpoint-auth').val() + }; + service.post(model, function(data) { + __ihm.loadEndpoints(); + }); + }).text('Add'); + + } + graphname(graphn) { let date = graphn.substr(graphn.lastIndexOf('_') + 1); let new_name = graphn.substr(0,graphn.lastIndexOf('_')); @@ -1031,11 +1121,12 @@ class IHMLocal { $(this).addClass('active'); } - + console.log("ID:"+ $(this).attr('id')); if ( ! 
( $(this).attr('id') in { 'help' : '','admin':'', 'user_menu': '' }) ) { $('.container').hide(); $('.container#navbar_content').show(); + console.log("===>"+'.container#content_' + $(this).attr('id')); $('.container#content_' + $(this).attr('id')).show(); } else { $('.container#navbar_content').show(); diff --git a/askomics/static/src/templates/handlebars/add_endpoint.hbs b/askomics/static/src/templates/handlebars/add_endpoint.hbs new file mode 100644 index 00000000..03ce157b --- /dev/null +++ b/askomics/static/src/templates/handlebars/add_endpoint.hbs @@ -0,0 +1,19 @@ +
+<div>
+  <table>
+    <tr>
+      <td>Name</td>
+      <td><input type="text" id="endpoint-name"/></td>
+    </tr>
+    <tr>
+      <td>Url</td>
+      <td><input type="text" id="endpoint-url"/></td>
+    </tr>
+    <tr>
+      <td>Auth</td>
+      <td><select id="endpoint-auth"><option value="Basic">Basic</option><option value="Digest">Digest</option></select></td>
+    </tr>
+  </table>
+</div>
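
For reference, the form above posts its three fields as a JSON object to the
add_endpoint route, which passes them to EndpointManager.saveEndpoint(). A
minimal client-side sketch of that contract (the host, the development port
6543, and the literal '/add_endpoint' path are assumptions here, since only
the route name is visible in ask_view.py; the payload keys match the JS model
in get_add_endpoints_form):

    import requests

    payload = {
        "name": "neighbour",                 # label shown in the endpoints table
        "url": "http://example.org/sparql",  # illustrative SPARQL endpoint URL
        "auth": "Basic",                     # Basic or Digest
    }
    response = requests.post("http://localhost:6543/add_endpoint", json=payload)
    response.raise_for_status()
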
diff --git a/askomics/static/src/templates/handlebars/endpoints.hbs b/askomics/static/src/templates/handlebars/endpoints.hbs new file mode 100644 index 00000000..6b9bc30b --- /dev/null +++ b/askomics/static/src/templates/handlebars/endpoints.hbs @@ -0,0 +1,40 @@ +
+<div>
+  <h3>Endpoints</h3>
+  <table id="data-table-endpoints" class="table">
+    <thead>
+      <tr>
+        <th></th>
+        <th>Name</th>
+        <th>Url</th>
+        <th>Auth</th>
+        <th>Message</th>
+        <th>Enable</th>
+      </tr>
+    </thead>
+    <tbody>
+      {{#each endpoints}}
+      <tr id="{{this.id}}">
+        <td><input type="checkbox" class="check_endpoint check_ep" name="{{this.id}}"/></td>
+        <td>{{this.name}}</td>
+        <td>{{this.endpoint}}</td>
+        <td>{{this.auth}}</td>
+        <td>{{this.message}}</td>
+        <td>
+          {{#if this.enable}}
+          <input type="checkbox" class="enable-endpoint" checked/>
+          {{else}}
+          <input type="checkbox" class="enable-endpoint"/>
+          {{/if}}
+        </td>
+      </tr>
+      {{/each}}
+    </tbody>
+  </table>
+ + +
+ + +
+ + diff --git a/askomics/static/src/templates/handlebars/navbar.hbs b/askomics/static/src/templates/handlebars/navbar.hbs index d3dc0994..afbf45f4 100644 --- a/askomics/static/src/templates/handlebars/navbar.hbs +++ b/askomics/static/src/templates/handlebars/navbar.hbs @@ -17,6 +17,7 @@ {{/if}} diff --git a/askomics/static/src/templates/index.pt b/askomics/static/src/templates/index.pt index 200b8234..fdefde5b 100644 --- a/askomics/static/src/templates/index.pt +++ b/askomics/static/src/templates/index.pt @@ -188,6 +188,9 @@ + + From 68db6574c33306908f7f01744d02aa4d172a6a81 Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 6 Dec 2017 10:27:35 +0100 Subject: [PATCH 005/136] add group by to be compliance with sparql 1.1 --- askomics/libaskomics/rdfdb/SparqlQueryGraph.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py index edb22e1a..6c5f2e23 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py @@ -136,7 +136,8 @@ def get_user_graph_infos_with_count(self): '\t?g dc:creator ?owner .\n' + '\t?g prov:wasDerivedFrom ?name .\n'+ '\t?g :accessLevel ?access .\n' + - '}' + '}', + 'post_action': 'GROUP BY ?g' }, True) def get_if_positionable(self, uri): From 0cef557d4c8bc3f88f1aebbce389ec546a626512 Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 6 Dec 2017 10:28:36 +0100 Subject: [PATCH 006/136] remove module management --- askomics/__init__.py | 2 - askomics/ask_view.py | 88 +--- askomics/libaskomics/EndpointManager.py | 4 +- askomics/libaskomics/ModulesManager.py | 396 ------------------ askomics/libaskomics/rdfdb/QueryLauncher.py | 12 +- askomics/static/modules/biopax.ttl | 124 ++++++ askomics/static/modules/gene_ontology.mo | 2 +- askomics/static/modules/go.ttl | 80 ++++ askomics/static/modules/prov.ttl | 41 ++ .../src/js/core/AskomicsUserAbstraction.js | 2 +- askomics/static/src/js/core/IHMLocal.js | 6 +- askomics/static/src/js/view/integration.js | 2 - .../view/parameters/ModulesParametersView.js | 56 --- .../src/templates/handlebars/modules.hbs | 32 -- askomics/static/src/templates/index.pt | 2 - configs/development.fuseki.ini | 9 +- configs/development.rdf4j.ini | 5 +- gulpfile.js | 1 - 18 files changed, 287 insertions(+), 577 deletions(-) delete mode 100644 askomics/libaskomics/ModulesManager.py create mode 100644 askomics/static/modules/biopax.ttl create mode 100644 askomics/static/modules/go.ttl create mode 100644 askomics/static/modules/prov.ttl delete mode 100644 askomics/static/src/js/view/parameters/ModulesParametersView.js delete mode 100644 askomics/static/src/templates/handlebars/modules.hbs diff --git a/askomics/__init__.py b/askomics/__init__.py index d6b85c5b..318b4335 100644 --- a/askomics/__init__.py +++ b/askomics/__init__.py @@ -61,8 +61,6 @@ def main(global_config, **settings): # Shortcuts and modules routes config.add_route('importShortcut', '/importShortcut') config.add_route('deleteShortcut', '/deleteShortcut') - config.add_route('modules', '/modules') - config.add_route('manage_module', '/manage_module') # Data upload routes # Inspired from https://github.com/blueimp/jQuery-File-Upload/ and https://github.com/grooverdan/pyramid-jQuery-File-Upload-demo/ diff --git a/askomics/ask_view.py b/askomics/ask_view.py index 2aa71988..6b9cd4f6 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -18,7 +18,6 @@ from pygments.formatters import HtmlFormatter from askomics.libaskomics.ParamManager 
import ParamManager -from askomics.libaskomics.ModulesManager import ModulesManager from askomics.libaskomics.JobManager import JobManager from askomics.libaskomics.EndpointManager import EndpointManager @@ -313,8 +312,10 @@ def delete_graph(self): #delete metadatas ql.execute_query(sqb.get_delete_metadatas_of_graph(graph).query) + @view_config(route_name='delete_endpoints', request_method='POST') def delete_endpoints(self): + import pyramid.httpexceptions as exc """ """ @@ -331,6 +332,7 @@ def delete_endpoints(self): @view_config(route_name='add_endpoint', request_method='POST') def add_endpoint(self): + import pyramid.httpexceptions as exc """ """ @@ -353,25 +355,26 @@ def add_endpoint(self): @view_config(route_name='enable_endpoints', request_method='POST') def enable_endpoints(self): - """ + import pyramid.httpexceptions as exc + """ - """ + """ - self.checkAuthSession() + self.checkAuthSession() - if 'id' not in self.request.json_body: + if 'id' not in self.request.json_body: raise exc.exception_response(404) - if 'enable' not in self.request.json_body: + if 'enable' not in self.request.json_body: raise exc.exception_response(404) - id = self.request.json_body['id'] - enable = self.request.json_body['enable'] + id = self.request.json_body['id'] + enable = self.request.json_body['enable'] - em = EndpointManager(self.settings, self.request.session) + em = EndpointManager(self.settings, self.request.session) - if enable: + if enable: em.enable(id) - else: + else: em.disable(id,"") @@ -915,63 +918,6 @@ def deleteShortcut(self): return self.data - @view_config(route_name='modules', request_method='POST') - def modules(self): - - # Denny access for non loged users - if not self.request.session['admin'] : - return 'forbidden' - - # Denny for blocked users - if self.request.session['blocked']: - return 'blocked' - - try: - mm = ModulesManager(self.settings, self.request.session) - self.data = mm.getListModules() - except Exception as e: - traceback.print_exc(file=sys.stdout) - self.data['error'] = str(e) - self.request.response.status = 400 - - return self.data - - @view_config(route_name='manage_module', request_method='POST') - def manageModules(self): - # Denny access for non loged users - if not self.request.session['admin'] : - return 'forbidden' - - # Denny for blocked users - if self.request.session['blocked']: - return 'blocked' - - body = self.request.json_body - - jm = JobManager(self.settings, self.request.session) - jobid = jm.saveStartSparqlJob("Module "+body['name']) - - try: - - mm = ModulesManager(self.settings, self.request.session) - check = bool(body['checked']) - - mm.manageModules( - self.request.host_url, - body['uri'], - body['name'], - check) - - jm.updateEndSparqlJob(jobid,"Done",nr=0) - jm.updatePreviewJob(jobid,body['name'] + " ["+ str(body['checked'])+ "] done.") - - except Exception as e: - traceback.print_exc(file=sys.stdout) - jm.updateEndSparqlJob(jobid,"Error") - jm.updatePreviewJob(jobid,'Problem whith module '+body['name']+'.
'+str(e)) - - return self.data - @view_config(route_name='sparqlquery', request_method='POST') def get_value(self): """ Build a request from a json whith the following contents :variates,constraintesRelations,constraintesFilters""" @@ -1100,8 +1046,10 @@ def uploadCsv(self): def deletCsv(self): pm = ParamManager(self.settings, self.request.session) - os.remove(pm.get_user_csv_directory()+self.request.matchdict['name']), - + try: + os.remove(pm.get_user_csv_directory()+self.request.matchdict['name']), + except Exception as e: + self.log.warn(str(e)) @view_config(route_name='signup', request_method='POST') diff --git a/askomics/libaskomics/EndpointManager.py b/askomics/libaskomics/EndpointManager.py index 537ba9a0..e76f4544 100644 --- a/askomics/libaskomics/EndpointManager.py +++ b/askomics/libaskomics/EndpointManager.py @@ -12,7 +12,7 @@ def __init__(self, settings, session): ParamManager.__init__(self, settings, session) self.log = logging.getLogger(__name__) self.databasename = "endpoints.db" - self.pathdb = self.get_db_directory()+self.databasename + self.pathdb = self.get_db_directory()+"/"+self.databasename self.log.info(" ==> "+ self.pathdb +"<=="); conn = sqlite3.connect("file:"+self.pathdb,uri=True) @@ -41,7 +41,7 @@ def __init__(self, settings, session): conn.close() def saveEndpoint(self,name,url,auth,isenable): - + print("========================== SAVE ENDPOINT ===============================") conn = sqlite3.connect(self.pathdb,uri=True) c = conn.cursor() diff --git a/askomics/libaskomics/ModulesManager.py b/askomics/libaskomics/ModulesManager.py deleted file mode 100644 index bab285e6..00000000 --- a/askomics/libaskomics/ModulesManager.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -# -""" -select distinct ?a where { -GRAPH ?g { -?a ?b ?c. -} -VALUES ?g {} -} - -DELETE WHERE { -GRAPH { - ?p ?o - } - } - -""" - -import os.path -import traceback,sys -import json -import glob -import tempfile - -from askomics.libaskomics.ParamManager import ParamManager -from askomics.libaskomics.rdfdb.SparqlQueryBuilder import SparqlQueryBuilder -from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher -from askomics.libaskomics.source_file.SourceFileTtl import SourceFileTtl - -class ModulesManager(ParamManager): - """ - Manage Askomics modules : list/import/remove - """ - def __init__(self, settings, session): - ''' - Manage Modules Askomics - ''' - ParamManager.__init__(self, settings, session) - - ''' - All modules have to be composed with thes keys - ''' - self.latt = ['module','comment','version','owl','rdf'] - - self.moduleFiles = {} - self.graph_modules="askomics:graph:module" - self.modulesdir='askomics/static/modules/' - self.data = {} - - def loadAvailableMo(self): - ''' - - Initialize self.module with all .mod file find in the rith directory - Each module have to defined with : - - name : module name - - comment : comment describing the module - - version : version of this module - - graph : graph where data have to saved in the TPS - - owl : OWL file to upload in the TPS - - rdf : Askomics asbtraction - - ''' - - if not os.path.isdir(self.modulesdir): - self.log.debug('ca not find module directory: '+self.modulesdir) - self.data['error'] = 'can not find module directory ['+self.modulesdir+'] on server !' 
- return - - lfiles = glob.glob(self.modulesdir+"*.mo") - - for fil in lfiles: - try: - self.log.debug('reading '+fil) - a = json.loads(open(fil).read()) - self.moduleFiles[a['module']] = a - except Exception as e: - raise ValueError("Error in the module file ["+fil+"] : "+str(e)) - - - def saveMo(self,modulename): - ''' - ''' - - if not os.path.isdir(self.modulesdir): - self.log.debug('ca not find module directory: '+self.modulesdir) - self.data['error'] = 'ca not find module directory ['+self.modulesdir+'] on server !' - return - - lfiles = glob.glob(self.modulesdir+"*.mo") - allfilesnames = [] - for fil in lfiles: - try: - self.log.debug('reading '+fil) - a = json.loads(open(fil).read()) - allfilesnames.append(a['module']) - if a['module'] == modulename: - with open(fil, 'w') as outfile: - if not a['module'] in self.moduleFiles: - raise ValueError("Devel error : "+a['module']+" is not in moduleFiles :"+self.moduleFiles) - json.dump(self.moduleFiles[a['module']],outfile, sort_keys=True,indent=4,separators=(',', ': ')) - #json.dumps(self.moduleFiles[a['module']]) - return - - except Exception as e: - #traceback.print_exc(file=sys.stdout) - #self.log.error(str(e)) - raise ValueError("Error in the module file ["+fil+"] : "+str(e)) - - raise ValueError("Can not find module ["+ modulename +"] present in "+self.modulesdir+" ->"+' '.join(allfilesnames)) - - - def checkMo(self): - ''' - Check validity of modules finded in the module directory - ''' - - for module in self.moduleFiles: - if not all( att in self.moduleFiles[module] for att in self.latt ) : - self.log.debug('bad construction of module name : '+module) - self.moduleFiles[module] = None - raise ValueError("Module ["+ module +"] Miss one of these keys :"+str(self.latt)) - - def deleteMoState(self,urimo): - self.log.debug(' ***** Delete module '+urimo+' on TPS ***** ') - sqb = SparqlQueryBuilder(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - - ql.execute_query(sqb.prepare_query( - """ - DELETE WHERE { GRAPH <"""+self.graph_modules+"""> { <"""+urimo+"""> ?p ?o } } - """ - ).query) - - def importMoSate(self,mo,state): - ''' - Import in the TPS all triplet necessary to defined an askomics module - ''' - - rdf = ":"+self.escape['entity'](mo['module'])+" rdfs:label " + self.escape['text'](mo['module'])+";\n" - rdf += " rdfs:comment " + self.escape['text'](mo['comment'])+";\n" - rdf += " :module_version " + self.escape['text'](mo['version'])+";\n" - rdf += " :module_state " + self.escape['text'](state)+"" - if (state == 'ok'): - rdf += ";\n :module_graph " + '<'+mo['graph']+'>.\n' - else: - rdf += ".\n" - - sqb = SparqlQueryBuilder(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - sh = sqb.header_sparql_config('') - - ql.insert_data(rdf, self.graph_modules , sh) - - def moStateOnTPS(self): - ''' - check if module files state is saved on the TPS. - if not all modules files are saved with the unchecked status ! - ''' - sqb = SparqlQueryBuilder(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(sqb.build_query_on_the_fly({ - 'select': '?uri ?module ?comment ?version ?graph ?state', - 'from' : [self.graph_modules], - 'query': '{\n'+ - '?uri rdfs:label ?module .\n'+ - '?uri rdfs:comment ?comment .\n'+ - '?uri :module_version ?version .\n'+ - '?uri :module_state ?state .\n'+ - 'OPTIONAL { ?uri :module_graph ?graph . 
} \n'+ - '}\n' - }, True).query) - - self.log.debug(' ***** module on TPS ***** ') - listMoOnTps = {} - for result in results: - result['checked'] = (result['state'] == "ok") - result['wait'] = (result['state'] == "wait") - listMoOnTps[result['module']] = 0 - self.log.debug('module : '+result['module']) - #=======================*************** A ENLEVER *********======================================= - #pour debugger - #if result['wait'] : - # result['wait'] = False - #============================================================== - - self.log.debug(' ***** check Available Modules ***** ') - - requestAgain = False - - for mo in self.moduleFiles: - self.log.debug(" --> module "+mo); - if mo not in listMoOnTps: - self.log.debug(" --====== > new module < ======="); - self.importMoSate(self.moduleFiles[mo],'off') - requestAgain = True - - if requestAgain : - return False - - return results - - def getListModules(self): - ''' - ''' - self.loadAvailableMo() - self.checkMo() - - d = self.moStateOnTPS() - #manage new database - if d == False: - d = self.moStateOnTPS() - return d - - def generateAbstractAskomicsRDF(self, graph): - ''' - ''' - sqb = SparqlQueryBuilder(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(sqb.build_query_on_the_fly({ - 'select': '?entityDom ?entityDomLab ?relation ?entityRan ?entityRanLab', - 'query': '{\n'+ - 'GRAPH ?g { \n' + - '?relation a owl:ObjectProperty.\n'+ - '?relation rdfs:domain ?entityDom.\n'+ - '?entityDom a owl:Class .\n'+ - 'OPTIONAL { ?entityDom rdfs:label ?entityDomLab }.\n'+ - '?relation rdfs:range ?entityRan .\n'+ - '?entityRan a owl:Class .\n'+ - 'OPTIONAL { ?entityRan rdfs:label ?entityRanLab }.\n'+ - 'FILTER ( isIRI(?entityDom)).\n ' + - 'FILTER ( isIRI(?entityRan)).\n ' + - '}\n'+ - 'VALUES ?g {<'+graph+'>}' - '}\n' - }, True).query) - - entities = {} - attributes = {} - label = {} - - for r in results: - if r['entityDom'] not in entities: - entities[r['entityDom']] = {} - - if r['entityRan'] not in entities: - entities[r['entityRan']] = {} - - entities[r['entityDom']][r['relation']] = r['entityRan'] - - if ('entityDomLab' in r) and (r['entityDom'] not in label): - if r['entityDomLab'] != '': - label[r['entityDom']] = r['entityDomLab'] - if ('entityRanLab' in r) and (r['entityRan'] not in label): - if r['entityRan'] != '': - label[r['entityRan']] = r['entityRanLab'] - - - if len(entities)>0: - values = "" - for ent in entities: - values += '<'+ent+'> ' - - results = ql.process_query(sqb.build_query_on_the_fly({ - 'select': '?entity ?attribute ?basetype', - 'query': '{\n'+ - 'GRAPH ?g { \n' + - '?attribute a owl:DatatypeProperty.\n'+ - '?attribute rdfs:domain ?entity.\n'+ - '?entity a owl:Class .\n'+ - '?attribute rdfs:range ?basetype .\n'+ - 'FILTER ( isIRI(?basetype)).\n ' + - 'VALUES ?entity {'+values+'}.\n ' + - '}\n'+ - 'VALUES ?g {<'+graph+'>}' - '}\n' - }, True).query) - - for r in results: - if r['entity'] not in attributes: - attributes[r['entity']] = {} - attributes[r['entity']][r['attribute']] = r['basetype'] - - rdftab = [] - - rdftab.append("@prefix displaySetting: <"+self.ASKOMICS_prefix['displaySetting']+">.") - rdftab.append("@prefix rdfs: <"+self.ASKOMICS_prefix['rdfs']+">.") - - for ent in entities: - rdftab.append("<"+ent +"> displaySetting:entity \"true\"^^xsd:boolean.") - if ent not in label: - label = ent - - idxList = [ label.rfind('#'), label.rfind('/')] - idx = -1 - for i in idxList: - if i>idx: - idx=i - if idx>=0: - label = 
self.escape['text'](self.reverse_prefix(label[:idx+1])+':'+label[idx+1:]) - else: - label = self.escape['text'](label) - rdftab.append("<"+ent +"> rdfs:label "+label+"^^xsd:string.") - - if len(entities[ent])>0: - rdftab.append("<"+ ent +"> displaySetting:startPoint \"true\"^^xsd:boolean.") - if ent in attributes: - for at in attributes[ent]: - rdftab.append("<"+ at +"> displaySetting:attribute \"true\"^^xsd:boolean.") - - return rdftab - - def importRDF(self,mo,namemodule,host_url,graph=None): - self.log.debug("=============> importRDF <===================") - - if namemodule not in self.moduleFiles: - raise ValueError(namemodule+" does not exist.") - - fp = tempfile.NamedTemporaryFile(prefix="module_"+self.escape['entity'](namemodule), suffix=".ttl", mode="w", delete=False) - fp.write('\n'.join(self.moduleFiles[namemodule]['rdf'])) - fp.close() - sft = SourceFileTtl(self.settings, self.session, fp.name) - if graph != None: - sft.setGraph(graph) - else: - mo['graph'] = sft.graph - sft.persist(host_url, 'public') - - - def manageModules(self,host_url,urimodule,namemodule,active): - ''' - activate/desactivate module - ''' - self.log.debug(" --======================> manageModules <========================--- "); - self.log.debug(" uri:"+urimodule) - self.log.debug(" namemodule:"+namemodule) - self.log.debug(" active:"+str(active)) - - listMo = self.getListModules() - mo = None - for i in listMo: - if i["uri"] == urimodule: - mo = i - break - - if mo == None: - raise ValueError("Can not find Mo on TPS !") - - ########################################################################################## - if mo['state'] == 'wait': - self.log.debug(" ****************** WAIT MODE **************** :" + urimodule) - return - - self.log.debug(" delete MO state :" + urimodule) - self.deleteMoState(urimodule) - self.log.debug(" insert new MO state :"+urimodule) - self.importMoSate(mo,"wait") - ql = QueryLauncher(self.settings, self.session) - - if active: - - try: - self.importRDF(mo,namemodule,host_url) - #loading owl file - if 'owl' in self.moduleFiles[namemodule] and self.moduleFiles[namemodule]['owl'].strip() != '': - ql.load_data(self.moduleFiles[namemodule]['owl'],mo['graph']) - except Exception as e: - self.log.error('failed: ' + str(e)) - self.log.debug(" delete MO state :" + urimodule) - self.deleteMoState(urimodule) - self.log.debug(" insert new MO state :"+urimodule) - self.importMoSate(mo,"off") - raise e - - self.log.debug(" delete MO state :" + urimodule) - self.deleteMoState(urimodule) - self.log.debug(" insert new MO state :"+urimodule) - self.importMoSate(mo,"ok") - ########################################################################################## - # manage owl if dos not exist in the MO file - if 'rdf' not in self.moduleFiles[namemodule]: - self.moduleFiles[namemodule]['rdf'] = [] - if len(self.moduleFiles[namemodule]['rdf'])<=0: - self.moduleFiles[namemodule]['rdf'] = self.generateAbstractAskomicsRDF(mo['graph']) - self.importRDF(mo,namemodule,host_url,mo['graph']) - self.saveMo(namemodule) - - else: - if 'graph' in mo: - sqb = SparqlQueryBuilder(self.settings, self.session) - ql.execute_query(sqb.get_drop_named_graph(mo['graph']).query) - ql.execute_query(sqb.get_delete_metadatas_of_graph(mo['graph']).query) - - self.log.debug(" delete MO state :" + urimodule) - self.deleteMoState(urimodule) - self.log.debug(" insert new MO state :"+urimodule) - self.importMoSate(mo,"off") diff --git a/askomics/libaskomics/rdfdb/QueryLauncher.py b/askomics/libaskomics/rdfdb/QueryLauncher.py 
index a0b75f8e..8fbcd8b9 100755 --- a/askomics/libaskomics/rdfdb/QueryLauncher.py +++ b/askomics/libaskomics/rdfdb/QueryLauncher.py @@ -185,8 +185,9 @@ def execute_query(self, query, log_raw_results=True, externalService=None): except urllib.error.URLError as URLError: #url error, we disable the endpoint #raise ValueError(URLError.reason) - em = EndpointManager(self.settings, self.session) - em.disable(externalService['id'],str(URLError.reason)) + if externalService != None : + em = EndpointManager(self.settings, self.session) + em.disable(externalService['id'],str(URLError.reason)) results = [] #raise ValueError(URLError.reason) @@ -208,8 +209,13 @@ def parse_results(self, json_res): if json_res is None: return [] + + if type(json_res) is not dict: + return [] + if "results" not in json_res: return [] + if "bindings" not in json_res["results"]: return [] @@ -247,7 +253,7 @@ def process_query(self, query): json_query = self.execute_query(query, externalService=es, log_raw_results=False) return self.parse_results(json_query) - # call main usener dpoint askomics + # call main user endpoint askomics json_query = self.execute_query(query, log_raw_results=False) results = self.parse_results(json_query) # then other askomics endpoint defined by the user diff --git a/askomics/static/modules/biopax.ttl b/askomics/static/modules/biopax.ttl new file mode 100644 index 00000000..350581d0 --- /dev/null +++ b/askomics/static/modules/biopax.ttl @@ -0,0 +1,124 @@ +### suprimer cette premiere ligne +@prefix displaySetting: . + + +@prefix rdfs: . +@prefix xsd: . + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:GeneticInteraction"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:PathwayStep"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:EntityFeature"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Pathway"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:KPrime"^^xsd:string. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:EntityReferenceTypeVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Conversion"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:CellVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SmallMoleculeReference"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:ModificationFeature"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Complex"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SequenceModificationVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Provenance"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:ChemicalStructure"^^xsd:string. + displaySetting:attribute "true"^^xsd:boolean. 
+ displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:EntityReference"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SequenceSite"^^xsd:string. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:RelationshipXref"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Evidence"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:InteractionVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SequenceLocation"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:BiochemicalReaction"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Interaction"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:TissueVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SequenceInterval"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Stoichiometry"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:RelationshipTypeVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:ExperimentalForm"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:PhysicalEntity"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:EvidenceCodeVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:BioSource"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:DeltaG"^^xsd:string. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:CellularLocationVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:BindingFeature"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:PhenotypeVocabulary"^^xsd:string. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:SequenceRegionVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Entity"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Catalysis"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:ExperimentalFormVocabulary"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:Score"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean. 
+ displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "biopax:BiochemicalPathwayStep"^^xsd:string. + displaySetting:startPoint "true"^^xsd:boolean." diff --git a/askomics/static/modules/gene_ontology.mo b/askomics/static/modules/gene_ontology.mo index 2ec9658c..7efa4d91 100644 --- a/askomics/static/modules/gene_ontology.mo +++ b/askomics/static/modules/gene_ontology.mo @@ -2,7 +2,7 @@ "module" : "Gene Ontology", "comment" : "Description avalaible on http://geneontology.org/page/download-ontology", "version" : "1.0", -"owl" : "http://purl.obolibrary.org/obo/go.owl", +"owl" : "", "rdf" : [ "@prefix : .", "@prefix rdfs: .", diff --git a/askomics/static/modules/go.ttl b/askomics/static/modules/go.ttl new file mode 100644 index 00000000..4824fed9 --- /dev/null +++ b/askomics/static/modules/go.ttl @@ -0,0 +1,80 @@ +### suprimer cette premiere ligne +@prefix : . + +@prefix rdfs: . +@prefix prov: . +@prefix xsd: . +@prefix rdfg: . +@prefix rdf: . +@prefix owl: . +@prefix displaySetting: . +@prefix dc: . + +@prefix obo: . +@prefix oboInOwl: . +@prefix go: . + +@base . + rdf:type owl:Ontology . + +owl:Class displaySetting:entity "true"^^xsd:boolean. + +oboInOwl:id displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:id rdf:type owl:DatatypeProperty ; + rdfs:label "ID" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +obo:IAO_0000115 displaySetting:attribute "true"^^xsd:boolean . +obo:IAO_0000115 rdf:type owl:DatatypeProperty ; + rdfs:label "IAODefinition" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +rdfs:comment displaySetting:attribute "true"^^xsd:boolean . +rdfs:comment rdf:type owl:DatatypeProperty ; + rdfs:label "Comment" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasDefinition displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasDefinition rdf:type owl:DatatypeProperty ; + rdfs:label "Definition" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasOBONamespace displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasOBONamespace rdf:type owl:DatatypeProperty ; + rdfs:label "OBONamespace" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasBroadSynonym displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasBroadSynonym rdf:type owl:DatatypeProperty ; + rdfs:label "BroadSynonym" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasRelatedSynonym displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasRelatedSynonym rdf:type owl:DatatypeProperty ; + rdfs:label "RelatedSynonym" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasExactSynonym displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasExactSynonym rdf:type owl:DatatypeProperty ; + rdfs:label "ExactSynonym" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasNarrowSynonym displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasNarrowSynonym rdf:type owl:DatatypeProperty ; + rdfs:label "NarrowSynonym" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . + +oboInOwl:hasDbXref displaySetting:attribute "true"^^xsd:boolean . +oboInOwl:hasDbXref rdf:type owl:DatatypeProperty ; + rdfs:label "DbXref" ; + rdfs:domain owl:Class ; + rdfs:range xsd:string . diff --git a/askomics/static/modules/prov.ttl b/askomics/static/modules/prov.ttl new file mode 100644 index 00000000..673661d0 --- /dev/null +++ b/askomics/static/modules/prov.ttl @@ -0,0 +1,41 @@ +@prefix displaySetting: . +@prefix rdfs: . + displaySetting:entity "true"^^xsd:boolean. 
+ displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + rdfs:label "http://www.w3.org/2002/07/owl#Thing"^^xsd:string. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:attribute "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:startPoint "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. + displaySetting:entity "true"^^xsd:boolean. diff --git a/askomics/static/src/js/core/AskomicsUserAbstraction.js b/askomics/static/src/js/core/AskomicsUserAbstraction.js index 1b163a75..2a15c892 100644 --- a/askomics/static/src/js/core/AskomicsUserAbstraction.js +++ b/askomics/static/src/js/core/AskomicsUserAbstraction.js @@ -225,7 +225,7 @@ class AskomicsUserAbstraction { if ( ! (uri4 in iua.entityPositionableInformationList) ) { iua.entityPositionableInformationList[uri4] = {}; } else { - throw new Error("URI:"+uri4+" have several taxon,ref, start, end labels... "+JSON.stringify(iua.entityPositionableInformationList[uri4])); + // throw new Error("URI:"+uri4+" have several taxon,ref, start, end labels... 
"+JSON.stringify(iua.entityPositionableInformationList[uri4])); } } for (let entry in resultListTripletSubjectRelationObject.subclassof){ diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index ceaac68b..e292fbb7 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -60,8 +60,6 @@ class IHMLocal { //TODO: Manage all view in a array with a generic way this.shortcutsView = new ShortcutsParametersView(); - this.moduleView = new ModulesParametersView(); - this.menus = {} ; this.menus.menuFile = new AskomicsMenu("menuFile","buttonViewFile","viewMenuFile",fileFuncMenu,false); @@ -430,6 +428,7 @@ class IHMLocal { } get_add_endpoints_form() { + console.log(" +++ get_add_endpoints_form +++"); $('#modalTitle').text('Add Askomics endpoint'); @@ -442,7 +441,7 @@ class IHMLocal { let template = AskOmics.templates.add_endpoint; let html = template(); - $('#modalMessage').html(html ); + $('#modalMessage').html(html); $('#modalButton').click(function() { @@ -455,6 +454,7 @@ class IHMLocal { service.post(model, function(data) { __ihm.loadEndpoints(); }); + $(this).unbind( "click" ); }).text('Add'); } diff --git a/askomics/static/src/js/view/integration.js b/askomics/static/src/js/view/integration.js index d0286406..47c890f3 100644 --- a/askomics/static/src/js/view/integration.js +++ b/askomics/static/src/js/view/integration.js @@ -553,8 +553,6 @@ function loadSourceFile(file_elem, pub, headers) { }); __ihm.resetStats(); - - new ModulesParametersView().updateModules(); } /** diff --git a/askomics/static/src/js/view/parameters/ModulesParametersView.js b/askomics/static/src/js/view/parameters/ModulesParametersView.js deleted file mode 100644 index 3b8c88d9..00000000 --- a/askomics/static/src/js/view/parameters/ModulesParametersView.js +++ /dev/null @@ -1,56 +0,0 @@ -/*jshint esversion: 6 */ -/*jshint multistr:true */ - -let instanceModulesParametersView ; - -class ModulesParametersView extends InterfaceParametersView { - - constructor() { - super(); - /* Implement a Singleton */ - if ( instanceModulesParametersView !== undefined ) { - return instanceModulesParametersView; - } - - this.config = {}; - this.shortcuts = {}; - instanceModulesParametersView = this; - this.updateModules(); - } - - updateModules() { - let service = new RestServiceJs("modules"); - - service.post({},function(data) { - $("#Modules_adm").empty(); - - let template = AskOmics.templates.modules; - - let context = { modules:data }; - let html = template(context); - - $("#Modules_adm").append(html); - }); - } - - active(urimo,name,bool) { - - let service = new RestServiceJs("manage_module"); - - let param = { - 'checked' : bool, - 'uri' : urimo, - 'name' : name - } ; - - service.post(param,function(data) { - new ModulesParametersView().updateModules(); - }); - - new ModulesParametersView().updateModules(); - new AskomicsJobsViewManager().wait(50).then( function() { - $("#jobsview").trigger( "click" ); - }); - } - -} diff --git a/askomics/static/src/templates/handlebars/modules.hbs b/askomics/static/src/templates/handlebars/modules.hbs deleted file mode 100644 index 30c9bccc..00000000 --- a/askomics/static/src/templates/handlebars/modules.hbs +++ /dev/null @@ -1,32 +0,0 @@ -
-

Modules

-
- - - - - - - - - - - {{#each modules}} - - - - - - - {{#if this.wait}} - - {{else}} - {{#if this.checked}} - - {{else}} - - {{/if}} - {{/if}} - - {{/each}} -
ModulesDefinitionVersionN tripletsState
{{this.module}}{{this.comment}}{{this.version}}{{this.ntriplets}}{{this.state}}
diff --git a/askomics/static/src/templates/index.pt b/askomics/static/src/templates/index.pt index fdefde5b..8508beef 100644 --- a/askomics/static/src/templates/index.pt +++ b/askomics/static/src/templates/index.pt @@ -206,12 +206,10 @@
-
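
With module management removed, the per-user endpoints.db maintained by
EndpointManager is now the single registry of additional SPARQL endpoints.
A quick inspection sketch (the table and column names match the SQL in
EndpointManager.py; the on-disk path is an assumption, since it is built from
get_db_directory() at runtime):

    import sqlite3

    conn = sqlite3.connect("/tmp/askomics/endpoints.db")  # assumed location
    conn.row_factory = sqlite3.Row
    for row in conn.execute(
            "SELECT id, name, url, auth, enable, message FROM endpoints"):
        status = "enabled" if row["enable"] else "disabled: %s" % row["message"]
        print(row["id"], row["name"], row["url"], row["auth"], status)
    conn.close()
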
diff --git a/configs/development.fuseki.ini b/configs/development.fuseki.ini index 845b3ed6..872fe4b5 100644 --- a/configs/development.fuseki.ini +++ b/configs/development.fuseki.ini @@ -17,10 +17,11 @@ askomics.debug = true # debugtoolbar.hosts = 127.0.0.1 ::1 # Triplestore configuration -#askomics.endpoint = http://localhost:3030/database/query -#askomics.updatepoint = http://localhost:3030/database/update -askomics.endpoint = http://openstack-192-168-101-155.genouest.org/fuseki/database/query -askomics.updatepoint = http://openstack-192-168-101-155.genouest.org/fuseki/database/update +askomics.endpoint = http://localhost:3030/database/query +askomics.updatepoint = http://localhost:3030/database/update +#askomics.endpoint = http://openstack-192-168-101-155.genouest.org/fuseki/database/query +#askomics.updatepoint = http://openstack-192-168-101-155.genouest.org/fuseki/database/update +askomics.fdendpoint=http://localhost:4040/test askomics.hack_virtuoso = false askomics.upload_user_data_method=insert askomics.max_content_size_to_update_database = 20000 diff --git a/configs/development.rdf4j.ini b/configs/development.rdf4j.ini index 59eea342..8414849c 100644 --- a/configs/development.rdf4j.ini +++ b/configs/development.rdf4j.ini @@ -17,8 +17,9 @@ askomics.debug = true # debugtoolbar.hosts = 127.0.0.1 ::1 # Triplestore configuration -askomics.endpoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM -askomics.updatepoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM/statements +#askomics.endpoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM +#askomics.updatepoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM/statements +askomics.endpoint = http://localhost:8080/rdf4j-server/repositories/1 askomics.hack_virtuoso = false askomics.upload_user_data_method=insert askomics.max_content_size_to_update_database = 20000 diff --git a/gulpfile.js b/gulpfile.js index 19afc839..ecc77761 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -32,7 +32,6 @@ var askomicsSourceFiles = [ 'askomics/static/src/js/objects/link/AskomicsPositionableLink.js', 'askomics/static/src/js/view/parameters/InterfaceParametersView.js', 'askomics/static/src/js/view/parameters/ShortcutsParametersView.js', - 'askomics/static/src/js/view/parameters/ModulesParametersView.js', 'askomics/static/src/js/view/AskomicsObjectView.js', 'askomics/static/src/js/view/AskomicsLinkView.js', 'askomics/static/src/js/view/AskomicsNodeView.js', From 5fde47fda01ab0fb6fc956369edb2256a1e03e79 Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 6 Dec 2017 17:58:58 +0100 Subject: [PATCH 007/136] refactor implementation of federation - QueryLauncher : execute query on user endpoint - MultipleQueryLauncher : execute query (independently) on each askomics endpoint - FederationQueryLauncher : execute query on Federation engine (fedx4askomics) --- askomics/ask_view.py | 117 ++++++------- askomics/libaskomics/EndpointManager.py | 36 ++++ askomics/libaskomics/Security.py | 8 +- askomics/libaskomics/TripleStoreExplorer.py | 47 +++--- .../rdfdb/FederationQueryLauncher.py | 58 +++++++ .../rdfdb/MultipleQueryLauncher.py | 64 +++++++ askomics/libaskomics/rdfdb/QueryLauncher.py | 159 ++++++++---------- .../libaskomics/rdfdb/SparqlQueryBuilder.py | 11 +- .../libaskomics/rdfdb/SparqlQueryGraph.py | 2 +- .../src/js/core/AskomicsUserAbstraction.js | 2 +- .../parameters/ShortcutsParametersView.js | 4 +- configs/development.rdf4j.ini | 4 +- 12 files changed, 324 insertions(+), 188 deletions(-) create mode 100644 
askomics/libaskomics/rdfdb/FederationQueryLauncher.py create mode 100644 askomics/libaskomics/rdfdb/MultipleQueryLauncher.py diff --git a/askomics/ask_view.py b/askomics/ask_view.py index 6b9cd4f6..2d9e67ce 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -30,6 +30,10 @@ from askomics.libaskomics.rdfdb.SparqlQueryAuth import SparqlQueryAuth from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher +from askomics.libaskomics.rdfdb.MultipleQueryLauncher import MultipleQueryLauncher +from askomics.libaskomics.rdfdb.FederationQueryLauncher import FederationQueryLauncher + +from askomics.libaskomics.EndpointManager import EndpointManager from askomics.libaskomics.source_file.SourceFile import SourceFile from askomics.libaskomics.GalaxyConnector import GalaxyConnector @@ -98,30 +102,6 @@ def checkAdminSession(self): if not self.request.session['admin'] : raise exc.exception_response(403) - def setGraphUser(self,removeGraph=[]): - - self.settings['graph'] = {} - - #finding all private graph graph - sqg = SparqlQueryGraph(self.settings, self.request.session) - ql = QueryLauncher(self.settings, self.request.session) - - results = ql.process_query(sqg.get_user_graph_infos().query) - self.settings['graph']['private'] = [] - for elt in results: - if 'g' not in elt: - continue - if elt['g'] in removeGraph: - continue - self.settings['graph']['private'].append(elt['g']) - - #finding all public graph - results = ql.process_query(sqg.get_public_graphs().query) - self.settings['graph']['public'] = [] - for elt in results: - if elt['g'] in removeGraph: - continue - self.settings['graph']['public'].append(elt['g']) @view_config(route_name='start_point', request_method='GET') def start_points(self): @@ -130,7 +110,8 @@ def start_points(self): try: - self.setGraphUser([]) + sqb = SparqlQueryBuilder(self.settings, self.request.session) + self.settings['graph'] = sqb.getGraphUser([]) tse = TripleStoreExplorer(self.settings, self.request.session) nodes = tse.get_start_points() @@ -170,47 +151,49 @@ def statistics(self): sqs = SparqlQueryStats(self.settings, self.request.session) qlaucher = QueryLauncher(self.settings, self.request.session) + qmlaucher = MultipleQueryLauncher(self.settings, self.request.session) + em = EndpointManager(self.settings, self.request.session) public_stats = {} private_stats = {} # Number of triples - results_pub = qlaucher.process_query(sqs.get_number_of_triples('public').query) + results_pub = qmlaucher.process_query(sqs.get_number_of_triples('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_number_of_triples('private').query) public_stats['ntriples'] = results_pub[0]['number'] private_stats['ntriples'] = results_priv[0]['number'] # Number of entities - results_pub = qlaucher.process_query(sqs.get_number_of_entities('public').query) + results_pub = qmlaucher.process_query(sqs.get_number_of_entities('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_number_of_entities('private').query) public_stats['nentities'] = results_pub[0]['number'] private_stats['nentities'] = results_priv[0]['number'] # Number of classes - results_pub = qlaucher.process_query(sqs.get_number_of_classes('public').query) + results_pub = qmlaucher.process_query(sqs.get_number_of_classes('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_number_of_classes('private').query) public_stats['nclasses'] = results_pub[0]['number'] private_stats['nclasses'] = results_priv[0]['number'] 
# Number of graphs - results_pub = qlaucher.process_query(sqs.get_number_of_subgraph('public').query) + results_pub = qmlaucher.process_query(sqs.get_number_of_subgraph('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_number_of_subgraph('private').query) public_stats['ngraphs'] = results_pub[0]['number'] private_stats['ngraphs'] = results_priv[0]['number'] # Graphs info - results_pub = qlaucher.process_query(sqs.get_subgraph_infos('public').query) + results_pub = qmlaucher.process_query(sqs.get_subgraph_infos('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_subgraph_infos('private').query) public_stats['graphs'] = results_pub private_stats['graphs'] = results_priv # Classes and relations - results_pub = qlaucher.process_query(sqs.get_rel_of_classes('public').query) + results_pub = qmlaucher.process_query(sqs.get_rel_of_classes('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_rel_of_classes('private').query) public_stats['class_rel'] = results_pub @@ -235,7 +218,7 @@ def statistics(self): private_stats['class_rel'] = tmp # class and attributes - results_pub = qlaucher.process_query(sqs.get_attr_of_classes('public').query) + results_pub = qmlaucher.process_query(sqs.get_attr_of_classes('public').query,em.listAskomicsEndpoints()) results_priv = qlaucher.process_query(sqs.get_attr_of_classes('private').query) tmp = {} @@ -280,9 +263,9 @@ def empty_database(self): for graph in named_graphs: self.log.debug("--- DELETE GRAPH : %s", graph['g']) - ql.execute_query(sqb.get_drop_named_graph(graph['g']).query) + ql.process_query(sqb.get_drop_named_graph(graph['g']).query) #delete metadatas - ql.execute_query(sqb.get_delete_metadatas_of_graph(graph['g']).query) + ql.process_query(sqb.get_delete_metadatas_of_graph(graph['g']).query) except Exception as e: traceback.print_exc(file=sys.stdout) @@ -308,9 +291,9 @@ def delete_graph(self): for graph in graphs: self.log.debug("--- DELETE GRAPH : %s", graph) - ql.execute_query(sqb.get_drop_named_graph(graph).query) + ql.process_query(sqb.get_drop_named_graph(graph).query) #delete metadatas - ql.execute_query(sqb.get_delete_metadatas_of_graph(graph).query) + ql.process_query(sqb.get_delete_metadatas_of_graph(graph).query) @view_config(route_name='delete_endpoints', request_method='POST') @@ -389,29 +372,31 @@ def list_user_graph(self): sqg = SparqlQueryGraph(self.settings, self.request.session) query_launcher = QueryLauncher(self.settings, self.request.session) - res = query_launcher.execute_query(sqg.get_user_graph_infos_with_count().query) + res = query_launcher.process_query(sqg.get_user_graph_infos_with_count().query) named_graphs = [] - - for index_result in range(len(res['results']['bindings'])): - if not 'date' in res['results']['bindings'][index_result]: + print(res) + for index_result in range(len(res)): + if not 'date' in res[index_result]: self.log.warn('============= bad results user graph =================') - self.log.warn(res['results']['bindings'][index_result]) + self.log.warn(res[index_result]) self.log.warn("============================================================") continue - dat = datetime.datetime.strptime(res['results']['bindings'][index_result]['date']['value'], "%Y-%m-%dT%H:%M:%S.%f") + print("=============") + print(res[index_result]) + dat = datetime.datetime.strptime(res[index_result]['date'], "%Y-%m-%dT%H:%M:%S.%f") readable_date = dat.strftime("%d/%m/%Y %H:%M:%S") #dd/mm/YYYY hh:ii:ss named_graphs.append({ 
- 'g': res['results']['bindings'][index_result]['g']['value'], - 'name': res['results']['bindings'][index_result]['name']['value'], - 'count': res['results']['bindings'][index_result]['co']['value'], - 'date': res['results']['bindings'][index_result]['date']['value'], + 'g': res[index_result]['g'], + 'name': res[index_result]['name'], + 'count': res[index_result]['co'], + 'date': res[index_result]['date'], 'readable_date': readable_date, - 'access': res['results']['bindings'][index_result]['access']['value'], - 'owner': res['results']['bindings'][index_result]['owner']['value'], - 'access_bool': bool(res['results']['bindings'][index_result]['access']['value'] == 'public') + 'access': res[index_result]['access'], + 'owner': res[index_result]['owner'], + 'access_bool': bool(res[index_result]['access'] == 'public') }) return named_graphs @@ -480,8 +465,9 @@ def source_files_overview(self): # get all taxon in the TS sqg = SparqlQueryGraph(self.settings, self.request.session) - ql = QueryLauncher(self.settings, self.request.session) - res = ql.execute_query(sqg.get_all_taxons().query) + ql = MultipleQueryLauncher(self.settings, self.request.session) + em = EndpointManager(self.settings, self.session) + res = ql.process_query(sqg.get_all_taxons().query,em.listAskomicsEndpoints()) taxons_list = [] for elem in res['results']['bindings']: taxons_list.append(elem['taxon']['value']) @@ -671,8 +657,8 @@ def load_data_into_graph(self): #rollback sqb = SparqlQueryBuilder(self.settings, self.request.session) query_laucher = QueryLauncher(self.settings, self.request.session) - query_laucher.execute_query(sqb.get_drop_named_graph(src_file.graph).query) - query_laucher.execute_query(sqb.get_delete_metadatas_of_graph(src_file.graph).query) + query_laucher.proecess_query(sqb.get_drop_named_graph(src_file.graph).query) + query_laucher.proecess_query(sqb.get_delete_metadatas_of_graph(src_file.graph).query) traceback.print_exc(file=sys.stdout) jm.updateEndSparqlJob(jobid,"Error") @@ -727,8 +713,8 @@ def load_gff_into_graph(self): #rollback sqb = SparqlQueryBuilder(self.settings, self.request.session) query_laucher = QueryLauncher(self.settings, self.request.session) - query_laucher.execute_query(sqb.get_drop_named_graph(src_file_gff.graph).query) - query_laucher.execute_query(sqb.get_delete_metadatas_of_graph(src_file_gff.graph).query) + query_laucher.process_query(sqb.get_drop_named_graph(src_file_gff.graph).query) + query_laucher.process_query(sqb.get_delete_metadatas_of_graph(src_file_gff.graph).query) traceback.print_exc(file=sys.stdout) jm.updateEndSparqlJob(jobid,"Error") @@ -776,8 +762,8 @@ def load_ttl_into_graph(self): #rollback sqb = SparqlQueryBuilder(self.settings, self.request.session) query_laucher = QueryLauncher(self.settings, self.request.session) - query_laucher.execute_query(sqb.get_drop_named_graph(src_file_ttl.graph).query) - query_laucher.execute_query(sqb.get_delete_metadatas_of_graph(src_file_ttl.graph).query) + query_laucher.process_query(sqb.get_drop_named_graph(src_file_ttl.graph).query) + query_laucher.process_query(sqb.get_delete_metadatas_of_graph(src_file_ttl.graph).query) jm.updateEndSparqlJob(jobid,"Error") jm.updatePreviewJob(jobid,'Problem when integration of '+file_name+'.
'+str(e)) @@ -830,8 +816,8 @@ def load_bed_into_graph(self): #rollback sqb = SparqlQueryBuilder(self.settings, self.request.session) query_laucher = QueryLauncher(self.settings, self.request.session) - query_laucher.execute_query(sqb.get_drop_named_graph(src_file_bed.graph).query) - query_laucher.execute_query(sqb.get_delete_metadatas_of_graph(src_file_bed.graph).query) + query_laucher.process_query(sqb.get_drop_named_graph(src_file_bed.graph).query) + query_laucher.process_query(sqb.get_delete_metadatas_of_graph(src_file_bed.graph).query) traceback.print_exc(file=sys.stdout) jm.updateEndSparqlJob(jobid,"Error") @@ -908,7 +894,7 @@ def deleteShortcut(self): query_string += "<"+body["shortcut"]+">" + " ?r ?a.\n" query_string += "\t}\n" - res = ql.execute_query(query_string) + res = ql.process_query(query_string) except Exception as e: #exc_type, exc_value, exc_traceback = sys.exc_info() #traceback.print_exc(limit=8) @@ -947,7 +933,10 @@ def get_value(self): if 'from' in body: lfrom = body['from'] - results, query = tse.build_sparql_query_from_json(lfrom, body["variates"], body["constraintesRelations"], True) + #TODO: Faire une interface pour selectionnerles endpoints compatibles avec les graphes selectionnées + + em = EndpointManager(self.settings, self.request.session) + results, query = tse.build_sparql_query_from_json(em.listAskomicsEndpoints(),lfrom, body["variates"], body["constraintesRelations"], True) # Remove prefixes in the results table limit = int(body["limit"]) + 1 @@ -1528,8 +1517,8 @@ def delete_user(self): # Drop all this graph for graph in list_graph: try: - query_laucher.execute_query(sqb.get_drop_named_graph(graph).query) - query_laucher.execute_query(sqb.get_delete_metadatas_of_graph(graph).query) + query_laucher.process_query(sqb.get_drop_named_graph(graph).query) + query_laucher.process_query(sqb.get_delete_metadatas_of_graph(graph).query) except Exception as e: self.data['error'] = str(e) self.log.error(str(e)) @@ -1539,7 +1528,7 @@ def delete_user(self): # Delete user infos try: - query_laucher.execute_query(sqb.delete_user(username).query) + query_laucher.process_query(sqb.delete_user(username).query) except Exception as e: return 'failed: ' + str(e) diff --git a/askomics/libaskomics/EndpointManager.py b/askomics/libaskomics/EndpointManager.py index e76f4544..c906f2ac 100644 --- a/askomics/libaskomics/EndpointManager.py +++ b/askomics/libaskomics/EndpointManager.py @@ -97,6 +97,42 @@ def disable(self,id,message): conn.close() self.listEndpoints() + def listAskomicsEndpoints(self): + self.log.info(" == listEndpoints == ") + data = [] + try: + conn = sqlite3.connect(self.pathdb,uri=True) + conn.row_factory = sqlite3.Row + + c = conn.cursor() + + reqSql = """SELECT id, name, url, auth, enable, message FROM endpoints WHERE enable == 1""" + + c.execute(reqSql) + rows = c.fetchall() + self.log.info("nb row:"+str(len(rows))) + for row in rows: + + d = {} + d['id'] = row['id'] + d['name'] = row['name'] + d['endpoint'] = row['url'] + if row['auth'] != None and row['auth'] != 'NULL' : + d['auth'] = row['auth'] + else: + d['auth'] = '' + d['message'] = row['message'] + data.append(d) + + except sqlite3.OperationalError as e : + self.log.info("Endpoints database does not exist .") + + + c.execute(reqSql) + conn.commit() + conn.close() + return data + def listEndpoints(self): self.log.info(" == listEndpoints == ") data = [] diff --git a/askomics/libaskomics/Security.py b/askomics/libaskomics/Security.py index e8ab057b..356507b2 100644 --- a/askomics/libaskomics/Security.py +++ 
b/askomics/libaskomics/Security.py @@ -181,7 +181,7 @@ def delete_apikey(self, key): query_laucher = QueryLauncher(self.settings, self.session) sqa = SparqlQueryAuth(self.settings, self.session) - query_laucher.execute_query(sqa.delete_apikey(key).query) + query_laucher.process_query(sqa.delete_apikey(key).query) def get_number_of_users(self): @@ -372,7 +372,7 @@ def add_apikey(self, keyname): query_laucher = QueryLauncher(self.settings, self.session) sqa = SparqlQueryAuth(self.settings, self.session) - query_laucher.execute_query(sqa.add_apikey(self.username, keyname).query) + query_laucher.process_query(sqa.add_apikey(self.username, keyname).query) def add_galaxy(self, url, key): """Connect a galaxy account to Askomics @@ -395,7 +395,7 @@ def add_galaxy(self, url, key): query_laucher = QueryLauncher(self.settings, self.session) sqa = SparqlQueryAuth(self.settings, self.session) - query_laucher.execute_query(sqa.add_galaxy(self.username, url, key).query) + query_laucher.process_query(sqa.add_galaxy(self.username, url, key).query) def get_galaxy_infos(self): """Get Galaxy url and apikey of a user""" @@ -426,4 +426,4 @@ def delete_galaxy(self): query_laucher = QueryLauncher(self.settings, self.session) sqa = SparqlQueryAuth(self.settings, self.session) - query_laucher.execute_query(sqa.delete_galaxy(self.username).query) + query_laucher.process_query(sqa.delete_galaxy(self.username).query) diff --git a/askomics/libaskomics/TripleStoreExplorer.py b/askomics/libaskomics/TripleStoreExplorer.py index 6449f75a..e7c534ac 100644 --- a/askomics/libaskomics/TripleStoreExplorer.py +++ b/askomics/libaskomics/TripleStoreExplorer.py @@ -8,8 +8,10 @@ from askomics.libaskomics.rdfdb.SparqlQueryBuilder import SparqlQueryBuilder from askomics.libaskomics.rdfdb.SparqlQueryGraph import SparqlQueryGraph -from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher +from askomics.libaskomics.rdfdb.MultipleQueryLauncher import MultipleQueryLauncher +from askomics.libaskomics.rdfdb.FederationQueryLauncher import FederationQueryLauncher +from askomics.libaskomics.EndpointManager import EndpointManager class TripleStoreExplorer(ParamManager): """ @@ -39,9 +41,11 @@ def get_start_points(self): nodes = [] sqg = SparqlQueryGraph(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(sqg.get_public_start_point().query) - results += ql.process_query(sqg.get_user_start_point().query) + ql = MultipleQueryLauncher(self.settings, self.session) + em = EndpointManager(self.settings, self.session) + + results = ql.process_query(sqg.get_public_start_point().query,em.listAskomicsEndpoints()) + results += ql.process_query(sqg.get_user_start_point().query,em.listAskomicsEndpoints()) for result in results: g = result["g"] @@ -70,18 +74,19 @@ def getUserAbstraction(self): self.log.debug(" =========== TripleStoreExplorer:getUserAbstraction ===========") sqg = SparqlQueryGraph(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - - data['relations'] = ql.process_query(sqg.get_public_abstraction_relation('owl:ObjectProperty').query) - data['relations'] += ql.process_query(sqg.get_user_abstraction_relation('owl:ObjectProperty').query) - data['subclassof'] = ql.process_query(sqg.get_isa_relation_entities().query) - data['entities'] = ql.process_query(sqg.get_public_abstraction_entity().query) - data['entities'] += ql.process_query(sqg.get_user_abstraction_entity().query) - data['attributes'] = 
ql.process_query(sqg.get_public_abstraction_attribute_entity().query) - data['attributes'] += ql.process_query(sqg.get_user_abstraction_attribute_entity().query) - data['categories'] = ql.process_query(sqg.get_public_abstraction_category_entity().query) - data['categories'] += ql.process_query(sqg.get_user_abstraction_category_entity().query) - data['positionable'] = ql.process_query(sqg.get_abstraction_positionable_entity().query) + ql = MultipleQueryLauncher(self.settings, self.session) + em = EndpointManager(self.settings, self.session) + + data['relations'] = ql.process_query(sqg.get_public_abstraction_relation('owl:ObjectProperty').query,em.listAskomicsEndpoints()) + data['relations'] += ql.process_query(sqg.get_user_abstraction_relation('owl:ObjectProperty').query,em.listAskomicsEndpoints()) + data['subclassof'] = ql.process_query(sqg.get_isa_relation_entities().query,em.listAskomicsEndpoints()) + data['entities'] = ql.process_query(sqg.get_public_abstraction_entity().query,em.listAskomicsEndpoints()) + data['entities'] += ql.process_query(sqg.get_user_abstraction_entity().query,em.listAskomicsEndpoints()) + data['attributes'] = ql.process_query(sqg.get_public_abstraction_attribute_entity().query,em.listAskomicsEndpoints()) + data['attributes'] += ql.process_query(sqg.get_user_abstraction_attribute_entity().query,em.listAskomicsEndpoints()) + data['categories'] = ql.process_query(sqg.get_public_abstraction_category_entity().query,em.listAskomicsEndpoints()) + data['categories'] += ql.process_query(sqg.get_user_abstraction_category_entity().query,em.listAskomicsEndpoints()) + data['positionable'] = ql.process_query(sqg.get_abstraction_positionable_entity().query,em.listAskomicsEndpoints()) data['graph'] = sqg.getGraphUser() return data @@ -116,7 +121,7 @@ def build_recursive_block(self, tabul, constraints): return req return "" - def build_sparql_query_from_json(self, fromgraphs, variates, constraintes_relations,limit, send_request_to_tps=True): + def build_sparql_query_from_json(self, listEndpoints, fromgraphs, variates, constraintes_relations,limit, send_request_to_tps=True): """ Build a sparql query from JSON constraints """ @@ -126,11 +131,12 @@ def build_sparql_query_from_json(self, fromgraphs, variates, constraintes_relati sqb = SparqlQueryBuilder(self.settings, self.session) query = self.build_recursive_block('', constraintes_relations) + # if limit != None and limit > 0: # query += ' LIMIT ' + str(limit) if send_request_to_tps: - query_launcher = QueryLauncher(self.settings, self.session,federationRequest=True,external_lendpoints=[]) + query_launcher = FederationQueryLauncher(self.settings, self.session,listEndpoints) results = query_launcher.process_query(sqb.custom_query(fromgraphs, select, query).query) else: results = [] @@ -139,8 +145,9 @@ def build_sparql_query_from_json(self, fromgraphs, variates, constraintes_relati def get_prefix_uri(self): sqg = SparqlQueryGraph(self.settings, self.session) - ql = QueryLauncher(self.settings, self.session) - rs = ql.process_query(sqg.get_prefix_uri().query) + ql = MultipleQueryLauncher(self.settings, self.session) + em = EndpointManager(self.settings, self.session) + rs = ql.process_query(sqg.get_prefix_uri().query,em.listAskomicsEndpoints()) results = {} r_buf = {} diff --git a/askomics/libaskomics/rdfdb/FederationQueryLauncher.py b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py new file mode 100644 index 00000000..13f3a628 --- /dev/null +++ b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py @@ -0,0 +1,58 @@ +#! 
/usr/bin/env python
+# -*- coding: utf-8 -*-
+import os, time, tempfile
+import re
+import csv
+from pprint import pformat
+from SPARQLWrapper import SPARQLWrapper, JSON
+import requests
+import logging
+import urllib.request
+
+from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher
+
+class FederationQueryLauncher(QueryLauncher):
+    """
+    The FederationQueryLauncher runs a sparql query through a federation engine:
+    - the engine endpoint is read from the askomics.fdendpoint property.
+    - one comment line per remote endpoint is prepended to the query, so the
+      engine knows which endpoints to federate.
+    - process_query executes the annotated query and parses the results.
+    """
+
+    def __init__(self, settings, session,lendpoints):
+        QueryLauncher.__init__(self, settings, session)
+        self.log = logging.getLogger(__name__)
+
+
+        self.log.info(" =================== Federation Request ====================");
+
+        #comments added in sparql request to get all url endpoint.
+        self.commentsForFed=""
+        for endp in lendpoints:
+            self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n'
+        #add local TPS
+        self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n'
+
+        if not self.is_defined("askomics.fdendpoint") :
+            raise ValueError("cannot find the askomics.fdendpoint property in the config file!")
+
+        self.name = 'FederationEngine'
+        self.endpoint = self.get_param("askomics.fdendpoint")
+        self.username = None
+        self.password = None
+        self.urlupdate = None
+        self.auth = 'Basic'
+        self.allowUpdate = False
+
+    def process_query(self, query):
+        '''
+        Execute query and parse the results if exist
+        '''
+
+        # Federation Request case
+        #------------------------------------------------------
+
+        query = self.commentsForFed + query
+        json_query = self._execute_query(query,log_raw_results=False)
+        return self.parse_results(json_query)
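The federation engine never receives the endpoint list as a parameter: the
constructor above encodes it as comment lines that process_query() prepends to
every query. Under that assumption, a query handed to the engine starts like
this (endpoint names and URLs are placeholders):

    # Illustrative sketch of the annotated query built by process_query();
    # endpoint names and URLs are placeholders.
    user_query = "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10"
    query = ("#endpoint,endpoint1,http://example.org/sparql,false\n"
             "#endpoint,endpoint2,http://other.example.org/sparql,false\n"
             + user_query)
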
diff --git a/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py b/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py
new file mode 100644
index 00000000..65f44e6a
--- /dev/null
+++ b/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py
@@ -0,0 +1,63 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+import os, time, tempfile
+import re
+import csv
+from pprint import pformat
+from SPARQLWrapper import SPARQLWrapper, JSON
+import requests
+import logging
+import urllib.request
+
+from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher
+
+class MultipleQueryLauncher(QueryLauncher):
+    """
+    The MultipleQueryLauncher sends the same sparql query to several endpoints.
+    Useful to reach several AskOmics endpoints at once.
+    """
+
+    def __init__(self, settings, session):
+        QueryLauncher.__init__(self, settings, session)
+        self.log = logging.getLogger(__name__)
+        self.log.info(" =================== Multiple Query Lancher Request ====================");
+
+
+    def process_query(self,query,lendpoints):
+        '''
+        Execute query and parse the results if exist
+        '''
+
+        # Request on local Askomics
+        self.setUserDatastore()
+        json_query = self._execute_query(query, log_raw_results=False)
+        results = self.parse_results(json_query)
+
+        # then other askomics endpoint defined by the user
+        for es in lendpoints:
+            if 'name' not in es :
+                raise ValueError("Developer error: the 'name' attribute is not defined: "+str(es))
+            if 'endpoint' not in es :
+                raise ValueError("Developer error: the 'endpoint' attribute is not defined: "+str(es))
+
+            self.name = es['name']
+            self.endpoint = es['endpoint']
+            self.username = None
+            self.password = None
+            self.auth = 'Basic'
+
+            if 'username' in es :
+                self.username = es['username']
+            if 'password' in es :
+                self.password = es['password']
+            if 'auth' in es :
+                self.auth = es['auth']
+
+            self.urlupdate = None
+
+            self.allowUpdate = False
+
+            json_query = self._execute_query(query,log_raw_results=False)
+            results += self.parse_results(json_query)
+
+        return results
diff --git a/askomics/libaskomics/rdfdb/QueryLauncher.py b/askomics/libaskomics/rdfdb/QueryLauncher.py
index 8fbcd8b9..d7a72704 100755
--- a/askomics/libaskomics/rdfdb/QueryLauncher.py
+++ b/askomics/libaskomics/rdfdb/QueryLauncher.py
@@ -10,7 +10,6 @@
 import urllib.request
 
 from askomics.libaskomics.ParamManager import ParamManager
-from askomics.libaskomics.EndpointManager import EndpointManager
 
 class SPARQLError(RuntimeError):
     """
@@ -29,41 +28,44 @@ class QueryLauncher(ParamManager):
     from these preformated results using a ResultsBuilder instance.
     """
 
-    def __init__(self, settings, session,federationRequest=False,external_lendpoints=None):
+    def __init__(self, settings, session, name=None, endpoint=None, username=None, password=None, urlupdate=None, auth='Basic'):
         ParamManager.__init__(self, settings, session)
 
         self.log = logging.getLogger(__name__)
-        #comments added in sparql request to get all url endpoint.
- self.commentsForFed="" - em = EndpointManager(settings, session) - self.lendpoints = [] - - if federationRequest: - lendpoints = [] - if external_lendpoints: - lendpoints = external_lendpoints - lendpoints += em.listActiveEndpoints() - if len(lendpoints)==0 : - # no need federation - return - i=0 - for endp in lendpoints: - i+=1 - #self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' - self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' - #add local TPS - self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n' - - d = {} - d['name'] = 'Federation request' - if not self.is_defined("askomics.fdendpoint") : - raise Exception("can not find askomics.fdendpoint property in the config file !") - d['endpoint'] = self.get_param("askomics.fdendpoint") - d['enable'] = True - d['id'] = 0 - d['auth'] = 'basic' - self.lendpoints.append(d) + + self.name = name + self.endpoint = endpoint + self.username = username + self.password = password + self.urlupdate = urlupdate + self.auth = auth + self.allowUpdate = False + + if self.auth != 'Basic' and self.auth != 'Digest': + raise ValueError("Invalid Auth parameter :"+self.auth) + + def setUserDatastore(self): + """ + initialize endpoint with user configuration file + """ + self.name = 'Local' + self.allowUpdate = True + + if self.is_defined("askomics.endpoint"): + self.endpoint = self.get_param("askomics.endpoint") else: - self.lendpoints = em.listActiveEndpoints() + raise ValueError("askomics.endpoint does not exit.") + + if self.is_defined("askomics.updatepoint"): + self.urlupdate = self.get_param("askomics.updatepoint") + + if self.is_defined("askomics.endpoint_username"): + self.username = self.get_param("askomics.endpoint_username") + + if self.is_defined("askomics.endpoint_passwd"): + self.password = self.get_param("askomics.endpoint_passwd") + + if self.is_defined("askomics.endpoint.auth"): + self.auth = self.get_param("askomics.endpoint.auth") def setup_opener(self, proxy_config): """ @@ -101,17 +103,30 @@ def setup_opener(self, proxy_config): self.opener = urllib.request.build_opener(*handlers) urllib.request.install_opener(self.opener) - def execute_query(self, query, log_raw_results=True, externalService=None): + def setupSPARQLWrapper(self): + """ + Setup SPARQLWrapper to reach url endpoint + """ + data_endpoint = SPARQLWrapper(self.endpoint,self.urlupdate) + + if self.username and self.password: + data_endpoint.setCredentials(self.username, self.password) + elif self.username: + raise ValueError("passwd is not defined") + elif self.password: + raise ValueError("username is not defined") + + data_endpoint.setHTTPAuth(self.auth) # Basic or Digest + + return data_endpoint + + def _execute_query(self, query, log_raw_results=True, externalService=None): """Params: - libaskomics.rdfdb.SparqlQuery - log_raw_results: if True the raw json response is logged. Set to False if you're doing a select and parsing the results with parse_results. 
""" - #query = "#endpoint,askomics,http://localhost:8890/sparql/,false\n"+\ - # "#endpoint,regine,http://openstack-192-168-100-46.genouest.org/virtuoso/sparql,false\n"+\ - # query - # Proxy handling if self.is_defined("askomics.proxy"): proxy_config = self.get_param("askomics.proxy") @@ -127,48 +142,14 @@ def execute_query(self, query, log_raw_results=True, externalService=None): time0 = time.time() - if externalService is None : - urlupdate = None - if self.is_defined("askomics.updatepoint"): - urlupdate = self.get_param("askomics.updatepoint") - - if self.is_defined("askomics.endpoint"): - data_endpoint = SPARQLWrapper(self.get_param("askomics.endpoint"), urlupdate) - else: - raise ValueError("askomics.endpoint") - - if self.is_defined("askomics.endpoint_username") and self.is_defined("askomics.endpoint_passwd"): - user = self.get_param("askomics.endpoint_username") - passwd = self.get_param("askomics.endpoint_passwd") - data_endpoint.setCredentials(user, passwd) - elif self.is_defined("askomics.endpoint_username"): - raise ValueError("askomics.endpoint_passwd is not defined") - elif self.is_defined("askomics.endpoint_passwd"): - raise ValueError("askomics.endpoint_username is not defined") - - if self.is_defined("askomics.endpoint.auth"): - data_endpoint.setHTTPAuth(self.get_param("askomics.endpoint.auth")) # Basic or Digest - else: - if not externalService['enable'] : - self.log.debug("externalService "+externalService['name']+'('+externalService['endpoint']+') is disabled.') - return [] - - urlupdate = None - if 'updatepoint' in externalService: - data_endpoint = SPARQLWrapper(externalService['endpoint'], urlupdate) - else: - data_endpoint = SPARQLWrapper(externalService['endpoint']) - - if 'auth' in externalService: - data_endpoint.setHTTPAuth(externalService['auth']); - + data_endpoint = self.setupSPARQLWrapper() data_endpoint.setQuery(query) data_endpoint.method = 'POST' - if externalService != None and data_endpoint.isSparqlUpdateRequest(): - raise ValueError("Can not update a remote endpoint with url:"+str(externalService)) - if data_endpoint.isSparqlUpdateRequest(): + if not self.allowUpdate : + raise ValueError("Can not perform an update sparql request on an external endpoint.") + data_endpoint.setMethod('POST') # Hack for Virtuoso to LOAD a turtle file if self.is_defined("askomics.hack_virtuoso"): @@ -208,10 +189,11 @@ def parse_results(self, json_res): ''' if json_res is None: - return [] + raise ValueError("Communication was broken betwwen askomics and datastore.") if type(json_res) is not dict: - return [] + self.log.debug(str(json_res)) + raise ValueError("Invalide format response from datastore .") if "results" not in json_res: return [] @@ -245,21 +227,12 @@ def process_query(self, query): Execute query and parse the results if exist ''' - # Federation Request case - #------------------------------------------------------ - if self.commentsForFed != '': - query = self.commentsForFed + query - es = self.lendpoints[0] - json_query = self.execute_query(query, externalService=es, log_raw_results=False) - return self.parse_results(json_query) + # if no endpoint are configured, set local datastore + if not self.endpoint: + self.setUserDatastore() - # call main user endpoint askomics - json_query = self.execute_query(query, log_raw_results=False) + json_query = self._execute_query(query, log_raw_results=False) results = self.parse_results(json_query) - # then other askomics endpoint defined by the user - for es in self.lendpoints: - json_query = self.execute_query(query, 
externalService=es, log_raw_results=False) - results += self.parse_results(json_query) return results @@ -307,7 +280,7 @@ def load_data(self, url, graphName): self.log.debug("Loading into triple store (LOAD method) the content of: %s", url) query_string = "LOAD <"+url+"> INTO GRAPH"+ " <" + graphName + ">" - res = self.execute_query(query_string) + res = self._execute_query(query_string) return res @@ -363,6 +336,6 @@ def insert_data(self, ttl_string, graph, ttl_header=""): query_string += "\t\t}\n" query_string += "\t}\n" - res = self.execute_query(query_string) + res = self._execute_query(query_string) return res diff --git a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py index 2a54de7f..b579c7ec 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py @@ -6,7 +6,9 @@ from askomics.libaskomics.rdfdb.SparqlQuery import SparqlQuery from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher +from askomics.libaskomics.rdfdb.MultipleQueryLauncher import MultipleQueryLauncher from askomics.libaskomics.ParamManager import ParamManager +from askomics.libaskomics.EndpointManager import EndpointManager # from askomics.libaskomics.utils import prefix_lines class SparqlQueryBuilder(ParamManager): @@ -44,14 +46,17 @@ def getGraphUser(self,removeGraph=[]): settings['private'].append(elt['g']) self.log.debug("setting['private']:\n"+str(settings['private'])) - #finding all public graph + #finding all public graph on all Askomics endpoint qu = self.build_query_on_the_fly({ 'select': '?g', 'query': 'GRAPH ?g {\n'+ "?g :accessLevel 'public'. } " }, True) - ql = QueryLauncher(self.settings, self.session) - results = ql.process_query(qu.query) + + ql = MultipleQueryLauncher(self.settings, self.session) + em = EndpointManager(self.settings, self.session) + + results = ql.process_query(qu.query,em.listAskomicsEndpoints()) settings['public'] = [] for elt in results: if elt['g'] in removeGraph: diff --git a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py index 6c5f2e23..52162b92 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py @@ -137,7 +137,7 @@ def get_user_graph_infos_with_count(self): '\t?g prov:wasDerivedFrom ?name .\n'+ '\t?g :accessLevel ?access .\n' + '}', - 'post_action': 'GROUP BY ?g' + 'post_action': 'GROUP BY ?g ?name ?date ?access ?owner' }, True) def get_if_positionable(self, uri): diff --git a/askomics/static/src/js/core/AskomicsUserAbstraction.js b/askomics/static/src/js/core/AskomicsUserAbstraction.js index 2a15c892..c6274fa5 100644 --- a/askomics/static/src/js/core/AskomicsUserAbstraction.js +++ b/askomics/static/src/js/core/AskomicsUserAbstraction.js @@ -148,7 +148,7 @@ class AskomicsUserAbstraction { /* All relation are stored in tripletSubjectRelationObject */ iua.tripletSubjectRelationObject = resultListTripletSubjectRelationObject.relations; /* == External Service can add external relation == */ - //console.log("RELATIONS::"+JSON.stringify(iua.tripletSubjectRelationObject)); + console.log("ALL::"+JSON.stringify(resultListTripletSubjectRelationObject)); iua.entityInformationList = {}; iua.entityPositionableInformationList = {}; diff --git a/askomics/static/src/js/view/parameters/ShortcutsParametersView.js b/askomics/static/src/js/view/parameters/ShortcutsParametersView.js index 0f1a4e05..e79d43f2 100644 --- 
a/askomics/static/src/js/view/parameters/ShortcutsParametersView.js
+++ b/askomics/static/src/js/view/parameters/ShortcutsParametersView.js
@@ -21,7 +21,7 @@ class ShortcutsParametersView extends InterfaceParametersView {
     }
 
     setShortcut(sparql_res) {
-
+      
       if ( ! (sparql_res.shortcut in this.shortcuts) ) {
         this.shortcuts[sparql_res.shortcut] = {};
         this.shortcuts[sparql_res.shortcut].label = sparql_res.label;
@@ -150,6 +150,8 @@ class ShortcutsParametersView extends InterfaceParametersView {
     let service = new RestServiceJs("sparqlquery");
     let param = {
       'export'              : false,
+      'endpoint'            : [], // TODO manage endpoint from IHM ?
+      'graphs'              : ['askomics:graph:shortcut'],
       'variates'            : ["?graph","?shortcut","?label","?comment","?version","?in","?out","?output_var","?output_varname","?prefix_string","?sparql_string"],
       'constraintesRelations': [[["?shortcut a :shortcuts",
                                   "?shortcut rdfs:label ?label",
diff --git a/configs/development.rdf4j.ini b/configs/development.rdf4j.ini
index 8414849c..1d70bc8b 100644
--- a/configs/development.rdf4j.ini
+++ b/configs/development.rdf4j.ini
@@ -19,7 +19,9 @@ askomics.debug = true
 # Triplestore configuration
 #askomics.endpoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM
 #askomics.updatepoint = http://localhost:8080/rdf4j-server/repositories/SYSTEM/statements
-askomics.endpoint = http://localhost:8080/rdf4j-server/repositories/1
+askomics.endpoint = http://openstack-192-168-100-48.genouest.org/rdf4j/rdf4j-server/repositories/SYSTEM
+askomics.updatepoint = http://openstack-192-168-100-48.genouest.org/rdf4j/rdf4j-server/repositories/SYSTEM/statements
+askomics.fdendpoint=http://localhost:4040/test
 askomics.hack_virtuoso = false
 askomics.upload_user_data_method=insert
 askomics.max_content_size_to_update_database = 20000
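Taken together, the patch above leaves three launchers with distinct roles:
QueryLauncher targets a single endpoint (the local datastore by default),
MultipleQueryLauncher fans the same query out to each AskOmics endpoint and
concatenates the results, and FederationQueryLauncher delegates one federated
query to the engine named by askomics.fdendpoint. A rough sketch of the
dispatch rule the next patch builds on (the helper name is illustrative, not
part of the series):

    # Illustrative helper, not part of the patch series.
    from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher
    from askomics.libaskomics.rdfdb.FederationQueryLauncher import FederationQueryLauncher

    def pick_launcher(settings, session, endpoints):
        # zero or one endpoint: a plain QueryLauncher is enough
        if len(endpoints) <= 1:
            return QueryLauncher(settings, session)
        # several endpoints: hand the query to the federation engine
        return FederationQueryLauncher(settings, session, endpoints)
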
""" - my_session_factory = SignedCookieSessionFactory('itsaseekreet',timeout=1200) + my_session_factory = SignedCookieSessionFactory('itsaseekreet',timeout=2400) config = Configurator(settings=settings) config.set_session_factory(my_session_factory) diff --git a/askomics/ask_view.py b/askomics/ask_view.py index 2d9e67ce..7558fed8 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -387,8 +387,13 @@ def list_user_graph(self): dat = datetime.datetime.strptime(res[index_result]['date'], "%Y-%m-%dT%H:%M:%S.%f") readable_date = dat.strftime("%d/%m/%Y %H:%M:%S") #dd/mm/YYYY hh:ii:ss + endpt = '' + + if 'endpoint' in res[index_result].keys(): + endpt = res[index_result]['endpoint'], named_graphs.append({ + 'endpoint' : endpt, 'g': res[index_result]['g'], 'name': res[index_result]['name'], 'count': res[index_result]['co'], @@ -466,10 +471,10 @@ def source_files_overview(self): # get all taxon in the TS sqg = SparqlQueryGraph(self.settings, self.request.session) ql = MultipleQueryLauncher(self.settings, self.request.session) - em = EndpointManager(self.settings, self.session) + em = EndpointManager(self.settings, self.request.session) res = ql.process_query(sqg.get_all_taxons().query,em.listAskomicsEndpoints()) taxons_list = [] - for elem in res['results']['bindings']: + for elem in res: taxons_list.append(elem['taxon']['value']) self.data['taxons'] = taxons_list @@ -657,8 +662,8 @@ def load_data_into_graph(self): #rollback sqb = SparqlQueryBuilder(self.settings, self.request.session) query_laucher = QueryLauncher(self.settings, self.request.session) - query_laucher.proecess_query(sqb.get_drop_named_graph(src_file.graph).query) - query_laucher.proecess_query(sqb.get_delete_metadatas_of_graph(src_file.graph).query) + query_laucher.process_query(sqb.get_drop_named_graph(src_file.graph).query) + query_laucher.process_query(sqb.get_delete_metadatas_of_graph(src_file.graph).query) traceback.print_exc(file=sys.stdout) jm.updateEndSparqlJob(jobid,"Error") @@ -929,15 +934,18 @@ def get_value(self): try: tse = TripleStoreExplorer(self.settings, self.request.session) - lfrom = [] - if 'from' in body: - lfrom = body['from'] - - #TODO: Faire une interface pour selectionnerles endpoints compatibles avec les graphes selectionnées - - em = EndpointManager(self.settings, self.request.session) - results, query = tse.build_sparql_query_from_json(em.listAskomicsEndpoints(),lfrom, body["variates"], body["constraintesRelations"], True) - + #lfrom = [] + #if 'from' in body: + # lfrom = body['from'] + self.log.info("********************* sparqlquery ***************************"); + self.log.info("ENDPOINTS:"+str(body["endpoints"])) + self.log.info("GRAPHS:"+str(body["graphs"])) + results, query, typeRequest = tse.build_sparql_query_from_json( + body["endpoints"], + body["graphs"], + body["variates"], + body["constraintesRelations"], + True) # Remove prefixes in the results table limit = int(body["limit"]) + 1 if body["limit"] != -1 and limit < len(results): @@ -953,7 +961,7 @@ def get_value(self): self.data['file'] = query_laucher.format_results_csv(results, ordered_headers) if persist: - jm.updateEndSparqlJob(jobid,"Ok",nr=len(results),data=self.data['values'], file=self.data['file']) + jm.updateEndSparqlJob(jobid,"Ok "+typeRequest,nr=len(results),data=self.data['values'], file=self.data['file']) except Exception as e: #exc_type, exc_value, exc_traceback = sys.exc_info() @@ -963,7 +971,7 @@ def get_value(self): self.data['file'] = "" if persist: - jm.updateEndSparqlJob(jobid,"Error") + 
jm.updateEndSparqlJob(jobid,"Error "+typeRequest) jm.updatePreviewJob(jobid,str(e)) self.data['galaxy'] = self.request.session['galaxy'] diff --git a/askomics/libaskomics/TripleStoreExplorer.py b/askomics/libaskomics/TripleStoreExplorer.py index e7c534ac..17e9d1b3 100644 --- a/askomics/libaskomics/TripleStoreExplorer.py +++ b/askomics/libaskomics/TripleStoreExplorer.py @@ -8,6 +8,7 @@ from askomics.libaskomics.rdfdb.SparqlQueryBuilder import SparqlQueryBuilder from askomics.libaskomics.rdfdb.SparqlQueryGraph import SparqlQueryGraph +from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher from askomics.libaskomics.rdfdb.MultipleQueryLauncher import MultipleQueryLauncher from askomics.libaskomics.rdfdb.FederationQueryLauncher import FederationQueryLauncher @@ -87,8 +88,9 @@ def getUserAbstraction(self): data['categories'] = ql.process_query(sqg.get_public_abstraction_category_entity().query,em.listAskomicsEndpoints()) data['categories'] += ql.process_query(sqg.get_user_abstraction_category_entity().query,em.listAskomicsEndpoints()) data['positionable'] = ql.process_query(sqg.get_abstraction_positionable_entity().query,em.listAskomicsEndpoints()) - data['graph'] = sqg.getGraphUser() - + data['endpoints'] = sqg.getGraphUser() + self.log.debug("============== ENDPOINTS AND GRAPH =====================================") + self.log.debug(data['endpoints']) return data def build_recursive_block(self, tabul, constraints): @@ -134,14 +136,39 @@ def build_sparql_query_from_json(self, listEndpoints, fromgraphs, variates, cons # if limit != None and limit > 0: # query += ' LIMIT ' + str(limit) - + print("============ build_sparql_query_from_json ========") + print("endpoints:"+str(listEndpoints)) + print("graphs"+str(fromgraphs)) if send_request_to_tps: - query_launcher = FederationQueryLauncher(self.settings, self.session,listEndpoints) + if len(listEndpoints)==0: + print("============ QueryLauncher1 ========") + typeQuery = '' + query_launcher = QueryLauncher(self.settings, self.session) + elif len(listEndpoints)==1: + print("============ QueryLauncher ========") + typeQuery = '' + query_launcher = QueryLauncher(self.settings, self.session,name = listEndpoints[0], endpoint = listEndpoints[0]) + else: + print("============ FederationQueryLauncher ========") + typeQuery = '(Federation)' + lE = [] + iCount = 0 + for ul in listEndpoints: + iCount+=1 + end = {} + end['name'] = "endpoint"+str(iCount) + end['endpoint'] = ul + end['auth'] = 'Basic' + end['username'] = None + end['password'] = None + lE.append(end) + query_launcher = FederationQueryLauncher(self.settings, self.session,lE) + results = query_launcher.process_query(sqb.custom_query(fromgraphs, select, query).query) else: results = [] - return results, sqb.custom_query(fromgraphs, select, query).query + return results, sqb.custom_query(fromgraphs, select, query).query,typeQuery def get_prefix_uri(self): sqg = SparqlQueryGraph(self.settings, self.session) diff --git a/askomics/libaskomics/rdfdb/FederationQueryLauncher.py b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py index 13f3a628..6289dcc7 100644 --- a/askomics/libaskomics/rdfdb/FederationQueryLauncher.py +++ b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py @@ -32,7 +32,7 @@ def __init__(self, settings, session,lendpoints): for endp in lendpoints: self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' #add local TPS - self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n' + 
#self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n' if not self.is_defined("askomics.fdendpoint") : raise ValueError("can not find askomics.fdendpoint property in the config file !") diff --git a/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py b/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py index 65f44e6a..812fcdb6 100644 --- a/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py +++ b/askomics/libaskomics/rdfdb/MultipleQueryLauncher.py @@ -22,8 +22,7 @@ def __init__(self, settings, session): self.log = logging.getLogger(__name__) self.log.info(" =================== Multiple Query Lancher Request ===================="); - - def process_query(self,query,lendpoints): + def process_query(self,query,lendpoints,indexByEndpoint=False): ''' Execute query and parse the results if exist ''' @@ -31,7 +30,11 @@ def process_query(self,query,lendpoints): # Request on local Askomics self.setUserDatastore() json_query = self._execute_query(query, log_raw_results=False) - results = self.parse_results(json_query) + if indexByEndpoint: + results = {} + results[self.endpoint] = self.parse_results(json_query) + else: + results = self.parse_results(json_query) # then other askomics endpoint defined by the user for es in lendpoints: @@ -59,6 +62,10 @@ def process_query(self,query,lendpoints): self.allowUpdate = False json_query = self._execute_query(query,log_raw_results=False) - results += self.parse_results(json_query) + + if indexByEndpoint: + results[self.endpoint] = self.parse_results(json_query) + else: + results += self.parse_results(json_query) return results diff --git a/askomics/libaskomics/rdfdb/QueryLauncher.py b/askomics/libaskomics/rdfdb/QueryLauncher.py index d7a72704..4424257f 100755 --- a/askomics/libaskomics/rdfdb/QueryLauncher.py +++ b/askomics/libaskomics/rdfdb/QueryLauncher.py @@ -189,11 +189,11 @@ def parse_results(self, json_res): ''' if json_res is None: - raise ValueError("Communication was broken betwwen askomics and datastore.") + raise ValueError("Unable to get a response from the datastore.") if type(json_res) is not dict: self.log.debug(str(json_res)) - raise ValueError("Invalide format response from datastore .") + return [] if "results" not in json_res: return [] @@ -279,6 +279,8 @@ def load_data(self, url, graphName): """ self.log.debug("Loading into triple store (LOAD method) the content of: %s", url) + self.setUserDatastore() + query_string = "LOAD <"+url+"> INTO GRAPH"+ " <" + graphName + ">" res = self._execute_query(query_string) @@ -327,6 +329,8 @@ def insert_data(self, ttl_string, graph, ttl_header=""): self.log.debug("Loading into triple store (INSERT DATA method) the content: "+ttl_string[:50]+"[...]") + self.setUserDatastore() + query_string = ttl_header query_string += "\n" query_string += "INSERT DATA {\n" diff --git a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py index b579c7ec..333f2aa0 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py @@ -30,40 +30,47 @@ def getGraphUser(self,removeGraph=[]): qu = self.build_query_on_the_fly({ 'select': '?g', 'query': 'GRAPH ?g {\n'+\ - "?g dc:creator '" + self.session['username'] + "' . 
} ", + "?g dc:creator '" + self.session['username'] + "' .\n"+ + " } ", 'post_action': 'GROUP BY ?g' }, True) ql = QueryLauncher(self.settings, self.session) results = ql.process_query(qu.query) - settings['private'] = [] + endpoint = self.get_param("askomics.endpoint") + settings[endpoint] = {} + settings[endpoint]['private'] = [] for elt in results: if 'g' not in elt: continue if elt['g'] in removeGraph: continue - settings['private'].append(elt['g']) - self.log.debug("setting['private']:\n"+str(settings['private'])) + settings[endpoint]['private'].append(elt['g']) #finding all public graph on all Askomics endpoint qu = self.build_query_on_the_fly({ 'select': '?g', 'query': 'GRAPH ?g {\n'+ - "?g :accessLevel 'public'. } " + "?g :accessLevel 'public'. \n"+ + "} ", + 'post_action': 'GROUP BY ?g' }, True) ql = MultipleQueryLauncher(self.settings, self.session) em = EndpointManager(self.settings, self.session) - results = ql.process_query(qu.query,em.listAskomicsEndpoints()) - settings['public'] = [] - for elt in results: - if elt['g'] in removeGraph: - continue - settings['public'].append(elt['g']) + results = ql.process_query(qu.query,em.listAskomicsEndpoints(),indexByEndpoint=True) + + for endpoint in results: + settings[endpoint] = {} + settings[endpoint]['public'] = [] + for elt in results[endpoint]: + if elt['g'] in removeGraph: + continue + settings[endpoint]['public'].append(elt['g']) - self.log.debug("setting['public']:\n"+str(settings['public'])) + self.log.debug("setting:\n"+str(settings)) return settings def build_query_on_the_fly(self, replacement, adminrequest=False): @@ -90,13 +97,17 @@ def build_query_on_the_fly(self, replacement, adminrequest=False): #add ALL GRAPHS user only if from is not defined !! if 'from' not in set(replacement) or \ len(replacement['from']) == 0: - graphs = self.getGraphUser() - listfrom = graphs['public'] + graphs['private'] + endpoints = self.getGraphUser() + for graphs in endpoints: + listfrom = endpoints[graphs]['public'] + if 'private' in endpoints[graphs]: + endpoints[graphs]['private'] else: listfrom = replacement['from'] if len(listfrom) > 0: for elt in set(listfrom): + self.log.info(elt) query += "FROM <"+elt+">\n" query += "WHERE {"+"\n" diff --git a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py index 52162b92..548bd071 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryGraph.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryGraph.py @@ -111,19 +111,9 @@ def get_public_graphs(self): return self.build_query_on_the_fly({ 'select': '?g', 'query': 'GRAPH ?g {\n'+ - "?g :accessLevel 'public'. } " - }, True) - - def get_user_graph_infos(self): - """Get infos of all datasets owned by a user""" - return self.build_query_on_the_fly({ - 'select': '?g ?name ?date ?access', - 'query': 'GRAPH ?g {\n' + - '\t?g prov:generatedAtTime ?date .\n' + - '\t?g prov:wasDerivedFrom ?name .\n'+ - '\t?g :accessLevel ?access .\n' + - '}', - 'post_action': 'GROUP BY ?g ?name ?date ?access' + "?g :accessLevel 'public'. 
\n" + + " } ", + 'post_action': 'GROUP BY ?g' }, True) def get_user_graph_infos_with_count(self): diff --git a/askomics/libaskomics/source_file/SourceFile.py b/askomics/libaskomics/source_file/SourceFile.py index b81ce15d..a1af237d 100644 --- a/askomics/libaskomics/source_file/SourceFile.py +++ b/askomics/libaskomics/source_file/SourceFile.py @@ -87,6 +87,11 @@ def insert_metadatas(self,accessL): ttl += '<' + self.graph + '> dc:hasVersion "' + get_distribution('Askomics').version + '" .\n' ttl += '<' + self.graph + '> prov:describesService "' + os.uname()[1] + '" .\n' + if self.is_defined("askomics.endpoint"): + ttl += '<' + self.graph + '> prov:atLocation "' + self.get_param("askomics.endpoint") + '" .\n' + else: + raise ValueError("askomics.endpoint does not exit.") + sparql_header = sqb.header_sparql_config('') query_laucher.insert_data(ttl, self.graph, sparql_header) @@ -269,7 +274,7 @@ def persist(self, urlbase, public): self.metadatas['server'] = queryResults.info()['server'] else: self.metadatas['server'] = 'unknown' - + data['status'] = 'ok' data['total_triple_count'] = total_triple_count diff --git a/askomics/static/src/js/core/AskomicsForceLayoutManager.js b/askomics/static/src/js/core/AskomicsForceLayoutManager.js index ef6b7989..e064ad31 100644 --- a/askomics/static/src/js/core/AskomicsForceLayoutManager.js +++ b/askomics/static/src/js/core/AskomicsForceLayoutManager.js @@ -501,7 +501,8 @@ class AskomicsForceLayoutManager { let tab = __ihm.getAbstraction().getRelationsObjectsAndSubjectsWithURI(slt_node.uri); let objectsTarget = tab[0]; /* All triplets which slt_node URI are the subject */ let subjectsTarget = tab[1]; /* All triplets which slt_node URI are the object */ - + //console.log("insertSuggestionsWithNewNode"); + //console.log(JSON.stringify(objectsTarget)); let link; for (var uri in objectsTarget ) { @@ -745,7 +746,7 @@ class AskomicsForceLayoutManager { /* increment the number of link between the two nodes */ let linkbase = {} ; linkbase.uri = "is a" ; - console.log(JSON.stringify(suggestedNode)); + //console.log(JSON.stringify(suggestedNode)); //link = new AskomicsLink(linkbase,source,target); let link = new AskomicsIsALink(linkbase,source,suggestedNode); link.id = __ihm.getGraphBuilder().getId(); diff --git a/askomics/static/src/js/core/AskomicsGraphBuilder.js b/askomics/static/src/js/core/AskomicsGraphBuilder.js index 699d7de0..6b7292b2 100644 --- a/askomics/static/src/js/core/AskomicsGraphBuilder.js +++ b/askomics/static/src/js/core/AskomicsGraphBuilder.js @@ -355,6 +355,64 @@ } } + getEndpointAndGraphCategory(uri) { + let graphs = {} ; + let endpoints = {} ; + for (let g in __ihm.localUserAbstraction.uriToGraph[uri]) { + if (! (g in graphs)) { + graphs[g] = 1; + } + } + /* endpoints */ + for (let g in graphs) { + let endp = __ihm.localUserAbstraction.graphToEndpoint[g]; + if (! 
(endp in endpoints)) { + endpoints[endp] = 1; + } + } + + return [Object.keys(endpoints),Object.keys(graphs)]; + } + + /* browse nodes and edges to get graph and endpoints involved */ + getEndpointAndGraph() { + /* copy arrays to avoid to removed nodes and links instancied */ + let dup_node_array = $.extend(true, [], this._instanciedNodeGraph); + let dup_link_array = $.extend(true, [], this._instanciedLinkGraph); + + let graphs = {} ; + let endpoints = {} ; + + for (let idx=0;idx").append($("").attr("class","medium").append(endpoint)); + // let at = $("").attr("class","medium").append(endpoint); + $("#"+menu.listObjectMenu).append(at); + $.each(listGraph[endpoint], function(g) { + let graph = listGraph[endpoint][g]; + let li = buildLiView(graph,__ihm.graphname(graph).name,true); + li.on('click',function() { let span = $(this).find(".glyphicon"); let cur_uri = $(this).attr("uri"); if ( span.css("visibility") == "visible" ) { @@ -351,6 +355,7 @@ var graphFuncMenu = function(menu) { /* regenerate the graph */ __ihm.getSVGLayout().update(); }); - $("#"+menu.listObjectMenu).append(li).append($("
  • ")); + $("#"+menu.listObjectMenu).append(li).append($("
  • ")); + }); }); }; diff --git a/askomics/static/src/js/core/AskomicsUserAbstraction.js b/askomics/static/src/js/core/AskomicsUserAbstraction.js index c6274fa5..373f901d 100644 --- a/askomics/static/src/js/core/AskomicsUserAbstraction.js +++ b/askomics/static/src/js/core/AskomicsUserAbstraction.js @@ -148,13 +148,34 @@ class AskomicsUserAbstraction { /* All relation are stored in tripletSubjectRelationObject */ iua.tripletSubjectRelationObject = resultListTripletSubjectRelationObject.relations; /* == External Service can add external relation == */ - console.log("ALL::"+JSON.stringify(resultListTripletSubjectRelationObject)); + //console.log("relations::"+JSON.stringify(iua.tripletSubjectRelationObject)); iua.entityInformationList = {}; iua.entityPositionableInformationList = {}; iua.attributesEntityList = {}; iua.entitySubclassof = {} ; + // to know where (endpoint) wich graph are defined for each uri + // [uri]-->list([graph] -> list[endpoint]) + iua.uriToGraph = {} ; + iua.graphToEndpoint = {} ; + + for (let endpoint in resultListTripletSubjectRelationObject.endpoints){ + if ( 'private' in resultListTripletSubjectRelationObject.endpoints[endpoint]) { + for (let igraph in resultListTripletSubjectRelationObject.endpoints[endpoint]['private']){ + let graph = resultListTripletSubjectRelationObject.endpoints[endpoint]['private'][igraph]; + //console.log(endpoint+"==>"+graph); + iua.graphToEndpoint[graph] = endpoint ; + } + } + for (let igraph in resultListTripletSubjectRelationObject.endpoints[endpoint]['public']){ + let graph = resultListTripletSubjectRelationObject.endpoints[endpoint]['public'][igraph]; + //console.log(endpoint+"==>"+graph); + iua.graphToEndpoint[graph] = endpoint ; + } + } + + /***************************** ENTITIES **************************************/ /* All information about an entity available in TPS are stored in entityInformationList */ for (let entry in resultListTripletSubjectRelationObject.entities){ //console.log("ENTITY:"+JSON.stringify(resultListTripletSubjectRelationObject.entities[entry])); @@ -171,8 +192,30 @@ class AskomicsUserAbstraction { } iua.entityInformationList[graph][uri][rel] = val; + if ( ! (uri in iua.uriToGraph) ) { + iua.uriToGraph[uri] = {} ; + } + iua.uriToGraph[uri][graph]=1; + } + + /***************************** RELATIONS **************************************/ + + for (let entry in resultListTripletSubjectRelationObject.relations){ + //console.log("ENTITY:"+JSON.stringify(resultListTripletSubjectRelationObject.entities[entry])); + let graph = resultListTripletSubjectRelationObject.relations[entry].g; + let uri = resultListTripletSubjectRelationObject.relations[entry].subject; + let rel = resultListTripletSubjectRelationObject.relations[entry].relation; + let val = resultListTripletSubjectRelationObject.relations[entry].object; + + if ( ! 
(rel in iua.uriToGraph) ) { + iua.uriToGraph[rel] = {} ; + } + iua.uriToGraph[rel][graph]=1; + } + /***************************** ATTRIBUTES **************************************/ + for (let entry2 in resultListTripletSubjectRelationObject.attributes){ //console.log("ATTRIBUTE:"+JSON.stringify(resultListTripletSubjectRelationObject.attributes[entry2])); let graph = resultListTripletSubjectRelationObject.attributes[entry2].g; @@ -197,7 +240,9 @@ class AskomicsUserAbstraction { graphIUA[uri2].push(attribute); } - for (let entry3 in resultListTripletSubjectRelationObject.categories){ + /***************************** CATEGORIES **************************************/ + + for (let entry3 in resultListTripletSubjectRelationObject.categories){ //console.log("CATEGORY:"+JSON.stringify(resultListTripletSubjectRelationObject.categories[entry3])); let graph = resultListTripletSubjectRelationObject.categories[entry3].g; let uri3 = resultListTripletSubjectRelationObject.categories[entry3].entity; @@ -217,9 +262,25 @@ class AskomicsUserAbstraction { } iua.attributesEntityList[graph][uri3].push(attribute); - } - for (let entry4 in resultListTripletSubjectRelationObject.positionable){ + if ( ! (attribute.uri in iua.uriToGraph) ) { + iua.uriToGraph[attribute.uri] = {} ; + } + iua.uriToGraph[attribute.uri][graph]=1; + + } +/* + for (let idx in iua.uriToGraph ) { + iua.uriToGraph[idx] = Object.keys(iua.uriToGraph[idx]); + }*/ + /* + console.log(" ==================================== ORIGINE ===================================="); + console.log(JSON.stringify(iua.uriToGraph)); + console.log(" ==================================== G -> E ===================================="); + console.log(JSON.stringify(iua.graphToEndpoint)); + console.log(" ==================================== FIN ORIGINE ===================================="); + */ + for (let entry4 in resultListTripletSubjectRelationObject.positionable){ //console.log('POSITIONABLE:'+JSON.stringify(resultListTripletSubjectRelationObject.positionable[entry4])); var uri4 = resultListTripletSubjectRelationObject.positionable[entry4].entity; if ( ! (uri4 in iua.entityPositionableInformationList) ) { @@ -342,6 +403,8 @@ class AskomicsUserAbstraction { */ getRelationsObjectsAndSubjectsWithURI(UriSelectedNode) { + //console.log("++++++++ getRelationsObjectsAndSubjectsWithURI ++++++++"); + //console.log("UriSelectedNode="+UriSelectedNode); let objectsTarget = {} ; let subjectsTarget = {} ; @@ -365,26 +428,31 @@ class AskomicsUserAbstraction { } } if ( this.tripletSubjectRelationObject[i].subject == UriSelectedNode ) { - + //console.log("AsSubject:="+JSON.stringify(this.tripletSubjectRelationObject[i])); + //console.log("lentities:="+JSON.stringify(lentities)); /* check if graph is not removed */ if ( !(this.tripletSubjectRelationObject[i].object in lentities) ) continue; - + //console.log("1"); if (! (this.tripletSubjectRelationObject[i].object in objectsTarget) ) { + //console.log("2"); objectsTarget[this.tripletSubjectRelationObject[i].object] = {}; } - + //console.log("3"); if ( ! 
(this.tripletSubjectRelationObject[i].relation in objectsTarget[this.tripletSubjectRelationObject[i].object] ) ) { + //console.log("4"); objectsTarget[this.tripletSubjectRelationObject[i].object][this.tripletSubjectRelationObject[i].relation]=0; } } } + //console.log("objectsTarget:"+JSON.stringify(objectsTarget)); for ( let i in objectsTarget ) { objectsTarget[i] = Object.keys(objectsTarget[i]); } for ( let i in subjectsTarget ) { subjectsTarget[i] = Object.keys(subjectsTarget[i]); } + //console.log("objectsTarget:"+JSON.stringify(objectsTarget)); return [objectsTarget, subjectsTarget]; } @@ -453,13 +521,15 @@ class AskomicsUserAbstraction { } listGraphAvailable() { + let listG = {} ; - for (let g in this.entityInformationList) { - if (! (g in listG ) ) listG[g] = 0; - } - for (let g in this.attributesEntityList) { - if (! (g in listG ) ) listG[g] = 0; + for (let g in this.graphToEndpoint) { + if ( ! (this.graphToEndpoint[g] in listG)) { + listG[this.graphToEndpoint[g]] = [] ; + } + listG[this.graphToEndpoint[g]].push(g); } + return JSON.parse(JSON.stringify(listG)); } diff --git a/askomics/static/src/js/services/AskomicsJobsManager.js b/askomics/static/src/js/services/AskomicsJobsManager.js index 25f9e672..c9d4cb96 100644 --- a/askomics/static/src/js/services/AskomicsJobsManager.js +++ b/askomics/static/src/js/services/AskomicsJobsManager.js @@ -33,9 +33,9 @@ let instanceAskomicsJobsViewManager ; } static getClassTr(state) { - if (state == "Ok" ) return "bg-success"; + if (state.startsWith("Ok") ) return "bg-success"; if (state == "Done" ) return "bg-info"; - if (state == "Error" )return "bg-danger"; + if (state.startsWith("Error") )return "bg-danger"; return "bg-warning"; } @@ -119,7 +119,11 @@ let instanceAskomicsJobsViewManager ; console.log('+++ prepareQuery +++'); let tab = __ihm.getGraphBuilder().buildConstraintsGraph(); + let tab2 = __ihm.getGraphBuilder().getEndpointAndGraph(); + return { + 'endpoints' : tab2[0], + 'graphs' : tab2[1], 'variates' : tab[0], 'constraintesRelations': tab[1], 'constraintesFilters' : tab[2], diff --git a/askomics/static/src/js/view/AskomicsNodeView.js b/askomics/static/src/js/view/AskomicsNodeView.js index 45677e80..3bcf1fb0 100644 --- a/askomics/static/src/js/view/AskomicsNodeView.js +++ b/askomics/static/src/js/view/AskomicsNodeView.js @@ -78,18 +78,22 @@ class AskomicsNodeView extends AskomicsObjectView { let listForceFrom = listGraphsWithAtt; if (contextDependancy) { tab = this.node.buildConstraintsGraphForCategory(attribute.id,true); - let tab2 = __ihm.getGraphBuilder().buildConstraintsGraph(); - tab[1][0] = [].concat.apply([], [tab[1][0], tab2[1][0]]); +// let tab2 = __ihm.getGraphBuilder().buildConstraintsGraph(); +// tab[1][0] = [].concat.apply([], [tab[1][0], tab2[1][0]]); listForceFrom = []; /* unactive if query concern a user query */ } else { tab = this.node.buildConstraintsGraphForCategory(attribute.id,false); } + console.log(attribute.uri); + let endpAndGraphs = __ihm.getGraphBuilder().getEndpointAndGraphCategory(this.node.uri) ; inp.attr("list", "opt_" + labelSparqlVarId) .attr("sparqlid",URISparqlVarId); //console.log(JSON.stringify(nameDiv)); var service = new RestServiceJs("sparqlquery"); var model = { + 'endpoints' : endpAndGraphs[0], + 'graphs' : endpAndGraphs[1], 'variates' : tab[0], 'constraintesRelations': tab[1], 'limit' :-1, diff --git a/askomics/static/src/js/view/integration.js b/askomics/static/src/js/view/integration.js index 47c890f3..092e2427 100644 --- a/askomics/static/src/js/view/integration.js +++ 
b/askomics/static/src/js/view/integration.js @@ -38,6 +38,7 @@ $(function () { headers.push($(this).val()); }); loadSourceFile($(event.target).closest('.template-source_file'), false, headers); + __ihm.displayModal('Upload TSV file.', '', 'Close'); }); // Load the tsv file into the public graph diff --git a/askomics/static/src/js/view/parameters/ShortcutsParametersView.js b/askomics/static/src/js/view/parameters/ShortcutsParametersView.js index e79d43f2..e33326d9 100644 --- a/askomics/static/src/js/view/parameters/ShortcutsParametersView.js +++ b/askomics/static/src/js/view/parameters/ShortcutsParametersView.js @@ -150,7 +150,7 @@ class ShortcutsParametersView extends InterfaceParametersView { let service = new RestServiceJs("sparqlquery"); let param = { 'export' : false, - 'endpoint' : [], // TODO manage endpoint from IHM ? + 'endpoints' : [], // TODO manage endpoint from IHM ? 'graphs' : ['askomics:graph:shortcut'], 'variates' : ["?graph","?shortcut","?label","?comment","?version","?in","?out","?output_var","?output_varname","?prefix_string","?sparql_string"], 'constraintesRelations': [[["?shortcut a :shortcuts", From 6f78948669e67165e207b0f256a0e4002d617eab Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 13 Dec 2017 18:07:05 +0100 Subject: [PATCH 009/136] change timeout 1200->1800 --- askomics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/askomics/__init__.py b/askomics/__init__.py index dbbe67da..7bcec284 100644 --- a/askomics/__init__.py +++ b/askomics/__init__.py @@ -8,7 +8,7 @@ def main(global_config, **settings): """ This function returns a Pyramid WSGI application. """ - my_session_factory = SignedCookieSessionFactory('itsaseekreet',timeout=2400) + my_session_factory = SignedCookieSessionFactory('itsaseekreet',timeout=1800) config = Configurator(settings=settings) config.set_session_factory(my_session_factory) From ce285a26e4882eb4de4f076ed4b4808805bb7547 Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 13 Dec 2017 18:09:06 +0100 Subject: [PATCH 010/136] increase timeout check session --- askomics/static/src/js/core/AskomicsUser.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/askomics/static/src/js/core/AskomicsUser.js b/askomics/static/src/js/core/AskomicsUser.js index a126fcdd..64062450 100644 --- a/askomics/static/src/js/core/AskomicsUser.js +++ b/askomics/static/src/js/core/AskomicsUser.js @@ -24,7 +24,7 @@ class AskomicsUser { if ( user.isLogin() ) { user.intervalListener = setInterval(function(){ user.checkUser(); - }, 15000); + }, 30000); __ihm.displayNavbar(true, user.username, user.admin, user.blocked); } else { if ( user.intervalListener != undefined ) { From 9939a224c682ab24bbae8a76a194585f48503172 Mon Sep 17 00:00:00 2001 From: ofilangi Date: Wed, 13 Dec 2017 18:12:05 +0100 Subject: [PATCH 011/136] external endpoin management (test with reactom EBI) --- askomics/ask_view.py | 23 +- askomics/libaskomics/TripleStoreExplorer.py | 49 ++- .../rdfdb/FederationQueryLauncher.py | 5 +- .../libaskomics/rdfdb/SparqlQueryBuilder.py | 43 +++ askomics/static/modules/biopax.ttl | 307 +++++++++++------- askomics/static/modules/go.ttl | 20 ++ askomics/static/modules/prov.ttl | 101 +++--- .../src/js/core/AskomicsGraphBuilder.js | 42 ++- .../src/js/core/AskomicsUserAbstraction.js | 18 + askomics/static/src/js/core/IHMLocal.js | 12 +- .../src/js/services/AskomicsJobsManager.js | 1 + .../src/templates/handlebars/endpoints.hbs | 25 +- askomics/upload.py | 6 +- configs/development.virtuoso.ini | 3 +- configs/production.virtuoso.ini | 
8 +- 15 files changed, 464 insertions(+), 199 deletions(-) diff --git a/askomics/ask_view.py b/askomics/ask_view.py index 7558fed8..dfaffb1b 100644 --- a/askomics/ask_view.py +++ b/askomics/ask_view.py @@ -413,10 +413,14 @@ def list_endpoints(self): """ self.checkAuthSession() + session = {} em = EndpointManager(self.settings, self.request.session) + session['askomics'] = em.listEndpoints() - return em.listEndpoints() + sqb = SparqlQueryBuilder(self.settings, self.request.session) + session['external'] = sqb.getExternalServiceEndpoint() + return session @view_config(route_name='guess_csv_header_type', request_method='POST') @@ -933,14 +937,15 @@ def get_value(self): jobid = jm.saveStartSparqlJob("SPARQL Request",requestGraph=rg) try: + typeRequest = '' tse = TripleStoreExplorer(self.settings, self.request.session) - #lfrom = [] - #if 'from' in body: - # lfrom = body['from'] - self.log.info("********************* sparqlquery ***************************"); - self.log.info("ENDPOINTS:"+str(body["endpoints"])) - self.log.info("GRAPHS:"+str(body["graphs"])) + + endpoints_ext = [] + if 'endpoints_ext' in body: + endpoints_ext = body["endpoints_ext"] + results, query, typeRequest = tse.build_sparql_query_from_json( + endpoints_ext, body["endpoints"], body["graphs"], body["variates"], @@ -1020,7 +1025,6 @@ def getSparqlQueryInTextFormat(self): @view_config(route_name='ttl', request_method='GET') def uploadTtl(self): - pm = ParamManager(self.settings, self.request.session) response = FileResponse( pm.get_rdf_directory()+self.request.matchdict['name'], @@ -1804,13 +1808,11 @@ def send2galaxy(self): @view_config(route_name='get_uploaded_files', request_method="GET") def get_uploaded_files(self): - param_manager = ParamManager(self.settings, self.request.session) path = param_manager.get_upload_directory() self.data = {} self.data['files'] = {} - files = os.listdir(path) for file in files: @@ -1819,7 +1821,6 @@ def get_uploaded_files(self): self.data['files'][file] = file_size self.data['galaxy'] = self.request.session['galaxy'] - return self.data @view_config(route_name="delete_uploaded_files", request_method="POST") diff --git a/askomics/libaskomics/TripleStoreExplorer.py b/askomics/libaskomics/TripleStoreExplorer.py index 17e9d1b3..567775b9 100644 --- a/askomics/libaskomics/TripleStoreExplorer.py +++ b/askomics/libaskomics/TripleStoreExplorer.py @@ -89,6 +89,8 @@ def getUserAbstraction(self): data['categories'] += ql.process_query(sqg.get_user_abstraction_category_entity().query,em.listAskomicsEndpoints()) data['positionable'] = ql.process_query(sqg.get_abstraction_positionable_entity().query,em.listAskomicsEndpoints()) data['endpoints'] = sqg.getGraphUser() + data['endpoints_ext'] = sqg.getExternalServiceEndpoint() + self.log.debug("============== ENDPOINTS AND GRAPH =====================================") self.log.debug(data['endpoints']) return data @@ -123,7 +125,7 @@ def build_recursive_block(self, tabul, constraints): return req return "" - def build_sparql_query_from_json(self, listEndpoints, fromgraphs, variates, constraintes_relations,limit, send_request_to_tps=True): + def build_sparql_query_from_json(self, listExternalEndpoints,listEndpoints, fromgraphs, variates, constraintes_relations,limit, send_request_to_tps=True): """ Build a sparql query from JSON constraints """ @@ -136,20 +138,26 @@ def build_sparql_query_from_json(self, listEndpoints, fromgraphs, variates, cons # if limit != None and limit > 0: # query += ' LIMIT ' + str(limit) - print("============ build_sparql_query_from_json 
========") - print("endpoints:"+str(listEndpoints)) - print("graphs"+str(fromgraphs)) + self.log.debug("============ build_sparql_query_from_json ========") + self.log.debug("endpoints_ext:"+str(listExternalEndpoints)) + self.log.debug("endpoints:"+str(listEndpoints)) + self.log.debug("graphs"+str(fromgraphs)) if send_request_to_tps: - if len(listEndpoints)==0: - print("============ QueryLauncher1 ========") + if len(listEndpoints)+len(listExternalEndpoints)==0: + self.log.debug("============ QueryLauncher1 ========") typeQuery = '' query_launcher = QueryLauncher(self.settings, self.session) - elif len(listEndpoints)==1: - print("============ QueryLauncher ========") + elif len(listEndpoints)+len(listExternalEndpoints)==1: + self.log.debug("============ QueryLauncher ========") typeQuery = '' - query_launcher = QueryLauncher(self.settings, self.session,name = listEndpoints[0], endpoint = listEndpoints[0]) + endpoint = '' + if len(listEndpoints) == 1 : + endpoint = listEndpoints[0] + else: + endpoint = listExternalEndpoints[0] + query_launcher = QueryLauncher(self.settings, self.session,name = endpoint, endpoint = endpoint) else: - print("============ FederationQueryLauncher ========") + self.log.debug("============ FederationQueryLauncher ========") typeQuery = '(Federation)' lE = [] iCount = 0 @@ -158,10 +166,31 @@ def build_sparql_query_from_json(self, listEndpoints, fromgraphs, variates, cons end = {} end['name'] = "endpoint"+str(iCount) end['endpoint'] = ul + end['askomics'] = True + end['auth'] = 'Basic' + end['username'] = None + end['password'] = None + lE.append(end) + + for ul in listExternalEndpoints: + iCount+=1 + end = {} + end['name'] = "endpoint"+str(iCount) + end['endpoint'] = ul + end['askomics'] = False end['auth'] = 'Basic' end['username'] = None end['password'] = None lE.append(end) + + print("listEndpoints") + print(listEndpoints) + print("listExternalEndpoints") + print(listExternalEndpoints) + + print("==================================") + print(lE) + query_launcher = FederationQueryLauncher(self.settings, self.session,lE) results = query_launcher.process_query(sqb.custom_query(fromgraphs, select, query).query) diff --git a/askomics/libaskomics/rdfdb/FederationQueryLauncher.py b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py index 6289dcc7..14e5be42 100644 --- a/askomics/libaskomics/rdfdb/FederationQueryLauncher.py +++ b/askomics/libaskomics/rdfdb/FederationQueryLauncher.py @@ -30,7 +30,10 @@ def __init__(self, settings, session,lendpoints): #comments added in sparql request to get all url endpoint. 
 self.commentsForFed="" for endp in lendpoints: - self.commentsForFed+="#endpoint,"+endp['name']+','+endp['endpoint']+',false\n' + if endp['askomics']: + self.commentsForFed+="#endpoint,askomics,"+endp['name']+','+endp['endpoint']+',false\n' + else: + self.commentsForFed+="#endpoint,external,"+endp['name']+','+endp['endpoint']+',false\n' #add local TPS #self.commentsForFed+="#endpoint,local,"+self.get_param("askomics.endpoint")+',false\n' diff --git a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py index 333f2aa0..4c3bdeba 100644 --- a/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py +++ b/askomics/libaskomics/rdfdb/SparqlQueryBuilder.py @@ -73,6 +73,49 @@ def getGraphUser(self,removeGraph=[]): self.log.debug("setting:\n"+str(settings)) return settings + def getExternalServiceEndpoint(self): + """ + Get all external endpoints advertised in the AskOmics endpoints + """ + qu = self.build_query_on_the_fly({ + 'select': '?name ?url ?description ?class', + 'query': ''+ + "?service a sd:Service .\n"+ + "?service sd:endpoint ?url .\n"+ + "?service dcterms:title ?name .\n"+ + "?service dcterms:description ?description .\n"+ + "?service sd:defaultDataset ?dataset.\n"+ + "?dataset sd:defaultGraph ?graph.\n"+ + "?graph void:classPartition ?partition.\n"+ + "?partition void:class ?class. \n", + 'post_action': 'GROUP BY ?name ?url ?description ?class' + }, True) + + ql = MultipleQueryLauncher(self.settings, self.session) + em = EndpointManager(self.settings, self.session) + + results = ql.process_query(qu.query,em.listAskomicsEndpoints()) + + settings = {} + settings['endpoints'] = {} + settings['entities'] = {} + + for r in results: + if r['url'] not in settings['endpoints']: + endpoint_ext = {} + endpoint_ext['name'] = r['name'] + endpoint_ext['url'] = r['url'] + endpoint_ext['description'] = r['description'] + settings['endpoints'][endpoint_ext['url']] = endpoint_ext + settings['entities'][endpoint_ext['url']] = [] + settings['entities'][endpoint_ext['url']].append(r['class']) + + + self.log.debug("==================== EXTERNAL ENDPOINT ========================") + self.log.debug(settings) + return settings + + def build_query_on_the_fly(self, replacement, adminrequest=False): """ Build a query from the private or public template diff --git a/askomics/static/modules/biopax.ttl b/askomics/static/modules/biopax.ttl index 350581d0..137bc016 100644 --- a/askomics/static/modules/biopax.ttl +++ b/askomics/static/modules/biopax.ttl @@ -1,124 +1,191 @@ ### suprimer cette premiere ligne -@prefix displaySetting: . - +@prefix displaySetting: . +@prefix biopax: <http://www.biopax.org/release/biopax-level3.owl#> . @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> . @prefix xsd: <http://www.w3.org/2001/XMLSchema#> . - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:GeneticInteraction"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:PathwayStep"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:EntityFeature"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Pathway"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:KPrime"^^xsd:string. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:EntityReferenceTypeVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. 
- rdfs:label "biopax:Conversion"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:CellVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SmallMoleculeReference"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:ModificationFeature"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Complex"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SequenceModificationVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Provenance"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:ChemicalStructure"^^xsd:string. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:EntityReference"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SequenceSite"^^xsd:string. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:RelationshipXref"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Evidence"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:InteractionVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SequenceLocation"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:BiochemicalReaction"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Interaction"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:TissueVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SequenceInterval"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Stoichiometry"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:RelationshipTypeVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:ExperimentalForm"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:PhysicalEntity"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:EvidenceCodeVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:BioSource"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:DeltaG"^^xsd:string. - displaySetting:attribute "true"^^xsd:boolean. 
 - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:CellularLocationVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:BindingFeature"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:PhenotypeVocabulary"^^xsd:string. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:SequenceRegionVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Entity"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Catalysis"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:ExperimentalFormVocabulary"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:Score"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "biopax:BiochemicalPathwayStep"^^xsd:string. - displaySetting:startPoint "true"^^xsd:boolean." + +@prefix dcterms: <http://purl.org/dc/terms/> . + +@prefix sd: <http://www.w3.org/ns/sparql-service-description#> . +@prefix void: <http://rdfs.org/ns/void#> . + +[] a sd:Service ; + sd:endpoint ; + dcterms:title "Linked Open Data platform for EBI data"; + dcterms:description "The EBI RDF platform: linked open data for the life sciences - NCBI - NIH."; + sd:supportedLanguage sd:SPARQL11Query ; + sd:defaultDataset [ + a sd:Dataset ; + sd:defaultGraph [ + a sd:Graph ; + void:classPartition + [ + void:class biopax:GeneticInteraction ; + void:class biopax:PathwayStep ; + void:class biopax:CellVocabulary . + ]. + ]. + ]. + +biopax:GeneticInteraction displaySetting:entity "true"^^xsd:boolean. +biopax:GeneticInteraction rdfs:label "biopax:GeneticInteraction"^^xsd:string. +biopax:GeneticInteraction displaySetting:startPoint "true"^^xsd:boolean. + +biopax:PathwayStep displaySetting:entity "true"^^xsd:boolean. +biopax:PathwayStep rdfs:label "biopax:PathwayStep"^^xsd:string. +biopax:PathwayStep displaySetting:startPoint "true"^^xsd:boolean. + +biopax:EntityFeature displaySetting:entity "true"^^xsd:boolean. +biopax:EntityFeature rdfs:label "biopax:EntityFeature"^^xsd:string. +biopax:EntityFeature displaySetting:startPoint "true"^^xsd:boolean. + +biopax:Pathway displaySetting:entity "true"^^xsd:boolean. +biopax:Pathway rdfs:label "biopax:Pathway"^^xsd:string. +biopax:Pathway displaySetting:startPoint "true"^^xsd:boolean. + +biopax:KPrime displaySetting:entity "true"^^xsd:boolean. +biopax:KPrime rdfs:label "biopax:KPrime"^^xsd:string. +biopax:kPrime displaySetting:attribute "true"^^xsd:boolean. + +biopax:EntityReferenceTypeVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:EntityReferenceTypeVocabulary rdfs:label "biopax:EntityReferenceTypeVocabulary"^^xsd:string. + +biopax:Conversion displaySetting:entity "true"^^xsd:boolean. +biopax:Conversion rdfs:label "biopax:Conversion"^^xsd:string. +biopax:Conversion displaySetting:startPoint "true"^^xsd:boolean. + +biopax:spontaneous displaySetting:attribute "true"^^xsd:boolean. + +biopax:CellVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:CellVocabulary rdfs:label "biopax:CellVocabulary"^^xsd:string. + +biopax:SmallMoleculeReference displaySetting:entity "true"^^xsd:boolean. 
 +biopax:SmallMoleculeReference rdfs:label "biopax:SmallMoleculeReference"^^xsd:string. +biopax:SmallMoleculeReference displaySetting:startPoint "true"^^xsd:boolean. + +biopax:chemicalFormula displaySetting:attribute "true"^^xsd:boolean. + +biopax:molecularWeight displaySetting:attribute "true"^^xsd:boolean. + +biopax:ModificationFeature displaySetting:entity "true"^^xsd:boolean. +biopax:ModificationFeature rdfs:label "biopax:ModificationFeature"^^xsd:string. +biopax:ModificationFeature displaySetting:startPoint "true"^^xsd:boolean. + +biopax:Complex displaySetting:entity "true"^^xsd:boolean. +biopax:Complex rdfs:label "biopax:Complex"^^xsd:string. +biopax:Complex displaySetting:startPoint "true"^^xsd:boolean. + +biopax:SequenceModificationVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:SequenceModificationVocabulary rdfs:label "biopax:SequenceModificationVocabulary"^^xsd:string. + +biopax:Provenance displaySetting:entity "true"^^xsd:boolean. +biopax:Provenance rdfs:label "biopax:Provenance"^^xsd:string. + +biopax:ChemicalStructure displaySetting:entity "true"^^xsd:boolean. +biopax:ChemicalStructure rdfs:label "biopax:ChemicalStructure"^^xsd:string. + +biopax:structureData displaySetting:attribute "true"^^xsd:boolean. + +biopax:EntityReference displaySetting:entity "true"^^xsd:boolean. +biopax:EntityReference rdfs:label "biopax:EntityReference"^^xsd:string. +biopax:EntityReference displaySetting:startPoint "true"^^xsd:boolean. + +biopax:SequenceSite displaySetting:entity "true"^^xsd:boolean. +biopax:SequenceSite rdfs:label "biopax:SequenceSite"^^xsd:string. +biopax:sequencePosition displaySetting:attribute "true"^^xsd:boolean. + +biopax:RelationshipXref displaySetting:entity "true"^^xsd:boolean. +biopax:RelationshipXref rdfs:label "biopax:RelationshipXref"^^xsd:string. +biopax:RelationshipXref displaySetting:startPoint "true"^^xsd:boolean. + +biopax:Evidence displaySetting:entity "true"^^xsd:boolean. +biopax:Evidence rdfs:label "biopax:Evidence"^^xsd:string. +biopax:Evidence displaySetting:startPoint "true"^^xsd:boolean. + +biopax:InteractionVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:InteractionVocabulary rdfs:label "biopax:InteractionVocabulary"^^xsd:string. + +biopax:SequenceLocation displaySetting:entity "true"^^xsd:boolean. +biopax:SequenceLocation rdfs:label "biopax:SequenceLocation"^^xsd:string. + +biopax:BiochemicalReaction displaySetting:entity "true"^^xsd:boolean. +biopax:BiochemicalReaction rdfs:label "biopax:BiochemicalReaction"^^xsd:string. +biopax:BiochemicalReaction displaySetting:startPoint "true"^^xsd:boolean. + +biopax:eCNumber displaySetting:attribute "true"^^xsd:boolean. +biopax:deltaH displaySetting:attribute "true"^^xsd:boolean. +biopax:deltaS displaySetting:attribute "true"^^xsd:boolean. +biopax:Interaction displaySetting:entity "true"^^xsd:boolean. +biopax:Interaction rdfs:label "biopax:Interaction"^^xsd:string. +biopax:Interaction displaySetting:startPoint "true"^^xsd:boolean. +biopax:TissueVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:TissueVocabulary rdfs:label "biopax:TissueVocabulary"^^xsd:string. +biopax:SequenceInterval displaySetting:entity "true"^^xsd:boolean. +biopax:SequenceInterval rdfs:label "biopax:SequenceInterval"^^xsd:string. +biopax:SequenceInterval displaySetting:startPoint "true"^^xsd:boolean. +biopax:Stoichiometry displaySetting:entity "true"^^xsd:boolean. +biopax:Stoichiometry rdfs:label "biopax:Stoichiometry"^^xsd:string. +biopax:Stoichiometry displaySetting:startPoint "true"^^xsd:boolean. 
 +biopax:stoichiometricCoefficient displaySetting:attribute "true"^^xsd:boolean. +biopax:RelationshipTypeVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:RelationshipTypeVocabulary rdfs:label "biopax:RelationshipTypeVocabulary"^^xsd:string. +biopax:ExperimentalForm displaySetting:entity "true"^^xsd:boolean. +biopax:ExperimentalForm rdfs:label "biopax:ExperimentalForm"^^xsd:string. +biopax:ExperimentalForm displaySetting:startPoint "true"^^xsd:boolean. +biopax:PhysicalEntity displaySetting:entity "true"^^xsd:boolean. +biopax:PhysicalEntity rdfs:label "biopax:PhysicalEntity"^^xsd:string. +biopax:PhysicalEntity displaySetting:startPoint "true"^^xsd:boolean. +biopax:EvidenceCodeVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:EvidenceCodeVocabulary rdfs:label "biopax:EvidenceCodeVocabulary"^^xsd:string. + +biopax:BioSource displaySetting:entity "true"^^xsd:boolean. +biopax:BioSource rdfs:label "biopax:BioSource"^^xsd:string. +biopax:BioSource displaySetting:startPoint "true"^^xsd:boolean. + +biopax:DeltaG displaySetting:entity "true"^^xsd:boolean. + +biopax:DeltaG rdfs:label "biopax:DeltaG"^^xsd:string. + +biopax:deltaGPrime0 displaySetting:attribute "true"^^xsd:boolean. + +biopax:CellularLocationVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:CellularLocationVocabulary rdfs:label "biopax:CellularLocationVocabulary"^^xsd:string. + +biopax:BindingFeature displaySetting:entity "true"^^xsd:boolean. +biopax:BindingFeature rdfs:label "biopax:BindingFeature"^^xsd:string. +biopax:BindingFeature displaySetting:startPoint "true"^^xsd:boolean. + +biopax:intraMolecular displaySetting:attribute "true"^^xsd:boolean. + +biopax:PhenotypeVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:PhenotypeVocabulary rdfs:label "biopax:PhenotypeVocabulary"^^xsd:string. + +biopax:patoData displaySetting:attribute "true"^^xsd:boolean. + +biopax:SequenceRegionVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:SequenceRegionVocabulary rdfs:label "biopax:SequenceRegionVocabulary"^^xsd:string. + +biopax:Entity displaySetting:entity "true"^^xsd:boolean. +biopax:Entity rdfs:label "biopax:Entity"^^xsd:string. +biopax:Entity displaySetting:startPoint "true"^^xsd:boolean. + +biopax:availability displaySetting:attribute "true"^^xsd:boolean. + +biopax:Catalysis displaySetting:entity "true"^^xsd:boolean. +biopax:Catalysis rdfs:label "biopax:Catalysis"^^xsd:string. +biopax:Catalysis displaySetting:startPoint "true"^^xsd:boolean. + +biopax:ExperimentalFormVocabulary displaySetting:entity "true"^^xsd:boolean. +biopax:ExperimentalFormVocabulary rdfs:label "biopax:ExperimentalFormVocabulary"^^xsd:string. + +biopax:Score displaySetting:entity "true"^^xsd:boolean. +biopax:Score rdfs:label "biopax:Score"^^xsd:string. +biopax:Score displaySetting:startPoint "true"^^xsd:boolean. + +biopax:value displaySetting:attribute "true"^^xsd:boolean. + +biopax:BiochemicalPathwayStep displaySetting:entity "true"^^xsd:boolean. +biopax:BiochemicalPathwayStep rdfs:label "biopax:BiochemicalPathwayStep"^^xsd:string. + +biopax:BiochemicalPathwayStep displaySetting:startPoint "true"^^xsd:boolean. diff --git a/askomics/static/modules/go.ttl b/askomics/static/modules/go.ttl index 4824fed9..e84c4ede 100644 --- a/askomics/static/modules/go.ttl +++ b/askomics/static/modules/go.ttl @@ -9,14 +9,34 @@ @prefix owl: <http://www.w3.org/2002/07/owl#> . @prefix displaySetting: . @prefix dc: <http://purl.org/dc/elements/1.1/> . +@prefix dcterms: <http://purl.org/dc/terms/> . + +@prefix sd: <http://www.w3.org/ns/sparql-service-description#> . +@prefix void: <http://rdfs.org/ns/void#> . @prefix obo: . @prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> . + @prefix go: . + @base . rdf:type owl:Ontology . 
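 (These sd:Service descriptions, like the one added just below, are what getExternalServiceEndpoint harvests: each advertises an endpoint URL, a title, a description and, through void:classPartition, the classes that endpoint can answer for. As a standalone sketch of the discovery query, run here with rdflib purely for illustration; AskOmics itself sends the query through MultipleQueryLauncher, and the file name is only an example:) from rdflib import Graph g = Graph() g.parse("askomics/static/modules/go.ttl", format="turtle") discovery = """ PREFIX sd: <http://www.w3.org/ns/sparql-service-description#> PREFIX void: <http://rdfs.org/ns/void#> PREFIX dcterms: <http://purl.org/dc/terms/> SELECT ?name ?url ?description ?class WHERE { ?service a sd:Service ; sd:endpoint ?url ; dcterms:title ?name ; dcterms:description ?description ; sd:defaultDataset ?dataset . ?dataset sd:defaultGraph ?graph . ?graph void:classPartition ?partition . ?partition void:class ?class . } """ for row in g.query(discovery): # One row per advertised class, e.g. owl:Class for go.ttl. print(row['url'], row['name'], row['class']) 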
 +[] a sd:Service ; + sd:endpoint ; + dcterms:title "Linked Open Data platform for EBI data"; + dcterms:description "The EBI RDF platform: linked open data for the life sciences - NCBI - NIH."; + sd:supportedLanguage sd:SPARQL11Query ; + sd:defaultDataset [ + a sd:Dataset ; + sd:defaultGraph [ + a sd:Graph ; + void:classPartition [ void:class owl:Class. ]. + ]. + ]. + + owl:Class displaySetting:entity "true"^^xsd:boolean. oboInOwl:id displaySetting:attribute "true"^^xsd:boolean . diff --git a/askomics/static/modules/prov.ttl b/askomics/static/modules/prov.ttl index 673661d0..db6fbb00 100644 --- a/askomics/static/modules/prov.ttl +++ b/askomics/static/modules/prov.ttl @@ -1,41 +1,64 @@ @prefix displaySetting: . @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> . - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - rdfs:label "http://www.w3.org/2002/07/owl#Thing"^^xsd:string. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:attribute "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:startPoint "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. - displaySetting:entity "true"^^xsd:boolean. +@prefix owl: <http://www.w3.org/2002/07/owl#> . + +@prefix dcterms: <http://purl.org/dc/terms/> . +@prefix sd: <http://www.w3.org/ns/sparql-service-description#> . +@prefix void: <http://rdfs.org/ns/void#> . + +@prefix prov: <http://www.w3.org/ns/prov#> . + + +[] a sd:Service ; + sd:endpoint ; + dcterms:title "Linked Open Data platform for EBI data"; + dcterms:description "The platform provides a set of stable services."; + sd:supportedLanguage sd:SPARQL11Query ; + sd:defaultDataset [ + a sd:Dataset ; + sd:defaultGraph [ + a sd:Graph ; + void:classPartition [ void:class prov:Usage; ]; + void:classPartition [ void:class prov:Agent; ]. + ]. + ]. + +prov:Usage displaySetting:entity "true"^^xsd:boolean. +prov:Agent displaySetting:entity "true"^^xsd:boolean. +prov:Agent displaySetting:startPoint "true"^^xsd:boolean. +prov:ActivityInfluence displaySetting:entity "true"^^xsd:boolean. +prov:ActivityInfluence displaySetting:startPoint "true"^^xsd:boolean. +prov:AgentInfluence displaySetting:entity "true"^^xsd:boolean. +prov:AgentInfluence displaySetting:startPoint "true"^^xsd:boolean. +prov:Derivation displaySetting:entity "true"^^xsd:boolean. 
 +prov:Derivation displaySetting:startPoint "true"^^xsd:boolean. +prov:Influence displaySetting:entity "true"^^xsd:boolean. +prov:Influence displaySetting:startPoint "true"^^xsd:boolean. +prov:Activity displaySetting:entity "true"^^xsd:boolean. +prov:Activity displaySetting:startPoint "true"^^xsd:boolean. +prov:endedAtTime displaySetting:attribute "true"^^xsd:boolean. +prov:startedAtTime displaySetting:attribute "true"^^xsd:boolean. +prov:EntityInfluence displaySetting:entity "true"^^xsd:boolean. +prov:EntityInfluence displaySetting:startPoint "true"^^xsd:boolean. +prov:Invalidation displaySetting:entity "true"^^xsd:boolean. +prov:Collection displaySetting:entity "true"^^xsd:boolean. +prov:Collection displaySetting:startPoint "true"^^xsd:boolean. +prov:Start displaySetting:entity "true"^^xsd:boolean. +prov:PrimarySource displaySetting:entity "true"^^xsd:boolean. +owl:Thing displaySetting:entity "true"^^xsd:boolean. +owl:Thing rdfs:label "http://www.w3.org/2002/07/owl#Thing"^^xsd:string. +prov:Role displaySetting:entity "true"^^xsd:boolean. +prov:Plan displaySetting:entity "true"^^xsd:boolean. +prov:Attribution displaySetting:entity "true"^^xsd:boolean. +prov:End displaySetting:entity "true"^^xsd:boolean. +prov:Entity displaySetting:entity "true"^^xsd:boolean. +prov:Entity displaySetting:startPoint "true"^^xsd:boolean. +prov:generatedAtTime displaySetting:attribute "true"^^xsd:boolean. +prov:invalidatedAtTime displaySetting:attribute "true"^^xsd:boolean. +prov:Association displaySetting:entity "true"^^xsd:boolean. +prov:Association displaySetting:startPoint "true"^^xsd:boolean. +prov:Generation displaySetting:entity "true"^^xsd:boolean. +prov:Revision displaySetting:entity "true"^^xsd:boolean. +prov:Quotation displaySetting:entity "true"^^xsd:boolean. +prov:Communication displaySetting:entity "true"^^xsd:boolean. +prov:Delegation displaySetting:entity "true"^^xsd:boolean. diff --git a/askomics/static/src/js/core/AskomicsGraphBuilder.js b/askomics/static/src/js/core/AskomicsGraphBuilder.js index 6b7292b2..33751bc1 100644 --- a/askomics/static/src/js/core/AskomicsGraphBuilder.js +++ b/askomics/static/src/js/core/AskomicsGraphBuilder.js @@ -403,16 +403,52 @@ /* endpoints */ for (let g in graphs) { - let endp = __ihm.localUserAbstraction.graphToEndpoint[g]; - if (! (endp in endpoints)) { - endpoints[endp] = 1; + if (g in __ihm.localUserAbstraction.graphToEndpoint ) { + let endp = __ihm.localUserAbstraction.graphToEndpoint[g]; + if (! (endp in endpoints)) { + endpoints[endp] = 1; + } } } //console.log("endpoints:"+JSON.stringify(endpoints)); //console.log("graphs:"+JSON.stringify(graphs)); + return [Object.keys(endpoints),Object.keys(graphs)]; } + getExternalEndpoint() { + + /* copy arrays to avoid to removed nodes and links instancied */ + let dup_node_array = $.extend(true, [], this._instanciedNodeGraph); + let dup_link_array = $.extend(true, [], this._instanciedLinkGraph); + + let endpoints = {} ; + + for (let idx=0;idx list[ external endpoint ]. 
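 (In Python terms, the JavaScript just below inverts the endpoints_ext['entities'] map, endpoint URL to advertised class URIs, into uriToExternalEndpoint, class URI to endpoint URLs, so the graph builder can tell which external endpoints serve a given entity. A sketch with illustrative sample data; the endpoint URL is made up:) endpoints_ext = { 'entities': { 'https://sparql.example.org/sparql': ['biopax:Pathway', 'biopax:GeneticInteraction'], } } # Invert endpoint -> classes into class -> endpoints. uri_to_external_endpoint = {} for endpoint, classes in endpoints_ext['entities'].items(): for cls in classes: uri_to_external_endpoint.setdefault(cls, []).append(endpoint) # {'biopax:Pathway': ['https://sparql.example.org/sparql'], ...} 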
 + iua.uriToExternalEndpoint = {} ; + //console.log("EXTERNAL ENDPOINT:"+JSON.stringify(resultListTripletSubjectRelationObject.endpoints_ext.entities)); + for (let endpoint in resultListTripletSubjectRelationObject.endpoints_ext.entities){ + + for (let entity in resultListTripletSubjectRelationObject.endpoints_ext.entities[endpoint]){ + let ent = resultListTripletSubjectRelationObject.endpoints_ext.entities[endpoint][entity]; + + if ( !(ent in iua.uriToExternalEndpoint )) { + iua.uriToExternalEndpoint[ent] = []; + } + iua.uriToExternalEndpoint[ent].push(endpoint); + } + } + + //console.log("EXTERNAL ENDPOINT:"+JSON.stringify(iua.uriToExternalEndpoint)); /***************************** ENTITIES **************************************/ /* All information about an entity available in TPS are stored in entityInformationList */ diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index e292fbb7..ce1739a3 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -354,15 +354,17 @@ class IHMLocal { }); }); - // sorted dataTable - $('.datasets-table').DataTable({ + if ( ! $.fn.dataTable.isDataTable( '.datasets-table' ) ) { + // sorted dataTable + $('.datasets-table').DataTable({ + 'retrieve': true, 'order': [[1, 'asc']], 'columnDefs': [ { 'orderable': false, 'targets': 0 }, { type: 'date-euro', targets: 2 } ] - }); - + }); + } }); } @@ -370,7 +372,7 @@ let service = new RestServiceJs('list_endpoints'); service.getAll(function(data) { let template = AskOmics.templates.endpoints; - let context = {endpoints: data}; + let context = { endpoints: data.askomics , endpoints_ext: data.external.endpoints}; let html = template(context); $('#content_endpoints').empty(); diff --git a/askomics/static/src/js/services/AskomicsJobsManager.js b/askomics/static/src/js/services/AskomicsJobsManager.js index c9d4cb96..f7bd7e85 100644 --- a/askomics/static/src/js/services/AskomicsJobsManager.js +++ b/askomics/static/src/js/services/AskomicsJobsManager.js @@ -122,6 +122,7 @@ let instanceAskomicsJobsViewManager ; let tab2 = __ihm.getGraphBuilder().getEndpointAndGraph(); return { + 'endpoints_ext' : __ihm.getGraphBuilder().getExternalEndpoint(), 'endpoints' : tab2[0], 'graphs' : tab2[1], 'variates' : tab[0], diff --git a/askomics/static/src/templates/handlebars/endpoints.hbs b/askomics/static/src/templates/handlebars/endpoints.hbs index 6b9bc30b..cf01a732 100644 --- a/askomics/static/src/templates/handlebars/endpoints.hbs +++ b/askomics/static/src/templates/handlebars/endpoints.hbs @@ -1,5 +1,5 @@ <div class="container" id="content_endpoints"> 
    -

    Endpoints

    +

    AskOmics Endpoints


    @@ -31,10 +31,31 @@
    -
    + +
    +

    External Endpoints

    +
    + + + + + + + + + + {{#each endpoints_ext}} + + + + + + {{/each}} + +
    NameUrlDescription
    {{this.name}}{{this.url}}{{this.description}}
    diff --git a/askomics/upload.py b/askomics/upload.py index 1f1336df..f3fd94c2 100644 --- a/askomics/upload.py +++ b/askomics/upload.py @@ -50,11 +50,11 @@ def filepath(self, name): def validate(self, new_file): if new_file['size'] < self.min_size: - new_file['error'] = 'File is too small' + new_file['error'] = 'File is too small (See askomics.upload_min_size).' elif new_file['size'] > self.max_size: - new_file['error'] = 'File is too large' + new_file['error'] = 'File is too large (See askomics.upload_max_size).' elif new_file['type'] not in self.allowed_types: # FIXME commented for tests - new_file['error'] = 'File type '+new_file['type']+' not allowed' # FIXME commented for tests + new_file['error'] = 'File type '+new_file['type']+' not allowed (See askomics.allowed_file_types).' # FIXME commented for tests else: return True return False diff --git a/configs/development.virtuoso.ini b/configs/development.virtuoso.ini index 1ef8b31b..5ba21ab2 100644 --- a/configs/development.virtuoso.ini +++ b/configs/development.virtuoso.ini @@ -19,7 +19,6 @@ askomics.debug = true # Triplestore configuration askomics.endpoint = http://localhost:8890/sparql askomics.updatepoint = http://localhost:8890/sparql -#askomics.endpoint = http://localhost:4040/test askomics.fdendpoint=http://localhost:4040/test askomics.hack_virtuoso = true askomics.upload_user_data_method=load @@ -33,7 +32,7 @@ askomics.delete_method = DELETE # AskOmics configuration askomics.overview_lines_limit = 200 -askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values','text/fasta' +askomics.allowed_file_types = 'text/turtle','text/plain','text/csv','text/tab-separated-values','text/fasta' askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics diff --git a/configs/production.virtuoso.ini b/configs/production.virtuoso.ini index f5cf0373..a67adc02 100644 --- a/configs/production.virtuoso.ini +++ b/configs/production.virtuoso.ini @@ -12,24 +12,26 @@ pyramid.debug_authorization = false pyramid.debug_notfound = false pyramid.debug_routematch = false pyramid.default_locale_name = en -#pyramid.includes = pyramid_debugtoolbar askomics.debug=false # debugtoolbar.hosts = 127.0.0.1 ::1 # Triplestore configuration askomics.endpoint = http://localhost:8890/sparql +askomics.updatepoint = http://localhost:8890/sparql askomics.hack_virtuoso = true +askomics.fdendpoint=http://localhost:4040/test +askomics.upload_user_data_method=load askomics.max_content_size_to_update_database = 4000 askomics.graph = urn:sparql:askomics askomics.users_graph = urn:sparql:askomics:users askomics.prefix = http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology# askomics.display_setting = http://www.irisa.fr/dyliss/rdfVisualization/display -askomics.delete_method = DELETE +askomics.delete_method = DELETE #can be POST or DELETE # AskOmics configuration askomics.overview_lines_limit = 200 -askomics.allowed_file_types = 'text/plain','text/csv','text/tab-separated-values','text/fasta' +askomics.allowed_file_types = 'text/turtle','text/plain','text/csv','text/tab-separated-values','text/fasta' askomics.upload_min_size = 1 askomics.upload_max_size = 200000000 askomics.files_dir = /tmp/askomics From 6f9951f930c17f6b62d03c4aa0ddfe07bb254d4e Mon Sep 17 00:00:00 2001 From: ofilangi Date: Thu, 14 Dec 2017 11:02:09 +0100 Subject: [PATCH 012/136] fix bug when user fill signup (perist-user was called 2 times.) 
 --- askomics/static/src/js/core/IHMLocal.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/askomics/static/src/js/core/IHMLocal.js b/askomics/static/src/js/core/IHMLocal.js index ce1739a3..e7179639 100644 --- a/askomics/static/src/js/core/IHMLocal.js +++ b/askomics/static/src/js/core/IHMLocal.js @@ -1112,6 +1112,7 @@ class IHMLocal { // Visual effect on active tab (Ask! / Integrate / Credits) $('.nav li').click(function(e) { + $(this).off(); //TODO : We can not defined nav li inside otherwise this function apply (define for the min nav ASKOMIS )..... // for now, to avoid a bad behaviours, we need to not defined id in sub nav tag @@ -1123,12 +1124,12 @@ class IHMLocal { $(this).addClass('active'); } - console.log("ID:"+ $(this).attr('id')); + //console.log("ID:"+ $(this).attr('id')); if ( ! ( $(this).attr('id') in { 'help' : '','admin':'', 'user_menu': '' }) ) { $('.container').hide(); $('.container#navbar_content').show(); - console.log("===>"+'.container#content_' + $(this).attr('id')); + //console.log("===>"+'.container#content_' + $(this).attr('id')); $('.container#content_' + $(this).attr('id')).show(); } else { $('.container#navbar_content').show(); @@ -1150,14 +1151,14 @@ class IHMLocal { }); // 'enter' key when password2 was filled ! - $('#signup_password2').keypress(function (e) { + $('#signup_password2').off().keypress(function (e) { if(e.which == 13) // the enter key code { $('#signup_button').click(); } }); - $('#signup_button').click(function(e) { + $('#signup_button').off().click(function(e) { let username = $('#signup_username').val(); let email = $('#signup_email').val(); let password = $('#signup_password').val(); From a644a063f6021cf6b190af48dfe9d65f53f4d24d Mon Sep 17 00:00:00 2001 From: ofilangi Date: Thu, 14 Dec 2017 14:48:12 +0100 Subject: [PATCH 013/136] link to the AskOmics GitHub wiki for Help/Documentation --- askomics/static/src/templates/handlebars/navbar.hbs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/askomics/static/src/templates/handlebars/navbar.hbs b/askomics/static/src/templates/handlebars/navbar.hbs index afbf45f4..618e98f3 100644 --- a/askomics/static/src/templates/handlebars/navbar.hbs +++ b/askomics/static/src/templates/handlebars/navbar.hbs @@ -23,7 +23,8 @@ {{/if}} {{/if}} <li> 
 -<li> + +</li> {{#if loged}} 