Skip to content

Commit

Permalink
remove comunica-ldf-client
Browse files Browse the repository at this point in the history
  • Loading branch information
smurp committed Aug 9, 2019
1 parent 89ec0a5 commit 97891af
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 188 deletions.
99 changes: 6 additions & 93 deletions lib/huviz.js
Expand Up @@ -9020,7 +9020,7 @@ OnceRunner.prototype.makeWrapper = function(callback) {
};

Huviz.prototype.auto_discover_name_for = function(namelessUri) {
var aUrl, args, domainName, downloadUrl, e, hasDomainName, retval, serverUrl, try_even_though_CORS_should_block, _ref, _ref1;
var aUrl, args, downloadUrl, e, hasDomainName, retval, serverUrl, try_even_though_CORS_should_block, _ref;
if (namelessUri.startsWith('_')) {
return;
}
Expand Down Expand Up @@ -9080,18 +9080,6 @@ OnceRunner.prototype.makeWrapper = function(callback) {
}
return;
}
_ref1 = this.domain2ldfServer;
for (domainName in _ref1) {
serverUrl = _ref1[domainName];
args = {
namelessUri: namelessUri,
serverUrl: serverUrl
};
if (hasDomainName(domainName) || domainName === '*') {
this.run_ldf_name_query(args);
return;
}
}
};

Huviz.prototype.discover_names_including = function(includes) {
Expand Down Expand Up @@ -9226,13 +9214,6 @@ OnceRunner.prototype.makeWrapper = function(callback) {
success_handler(data, textStatus, jqXHR, queryManager);
};

// Map of URI domain -> Linked Data Fragments (LDF) server URL.
// Values feed the LDF client's context.sources.value (see run_managed_query_ldf);
// the '*' entry is the wildcard fallback for any unmatched domain.
Huviz.prototype.domain2ldfServer = {
  'dbpedia.org': "http://fragments.dbpedia.org/2016-04/en",
  'viaf.org': "http://data.linkeddatafragments.org/viaf",
  'getty.edu': "http://data.linkeddatafragments.org/lov",
  '*': "http://data.linkeddatafragments.org/lov"
};

Huviz.prototype.default_name_query_args = {
predicates: [RDFS_label, FOAF_name, SCHEMA_name],
limit: 20
Expand Down Expand Up @@ -9399,71 +9380,6 @@ OnceRunner.prototype.makeWrapper = function(callback) {
}
};

Huviz.prototype.run_ldf_name_query = function(args) {
var defaults, namelessUri;
namelessUri = args.namelessUri;
args.query = "# " + (args.comment || ("run_ldf_name_query(" + namelessUri + ")")) + "\n" + this.make_name_query(namelessUri);
defaults = {
success_handler: this.generic_name_success_handler,
result_handler: this.name_result_handler,
from_N3: true,
default_terms: {
s: namelessUri,
p: RDFS_label
}
};
args = this.compose_object_from_defaults_and_incoming(defaults, args);
return this.run_managed_query_ldf(args);
};

// Run a SPARQL query against a Linked Data Fragments server by posting it to
// the comunica ldf-client web worker; worker messages are relayed into the
// queryManager created by run_managed_query_abstract.
// Returns the queryManager so the caller can monitor or cancel the query.
Huviz.prototype.run_managed_query_ldf = function(args) {
  var error_callback, ldf_worker, query, queryManager, result_handler, serverUrl, success_handler, timeout;
  queryManager = this.run_managed_query_abstract(args);
  success_handler = args.success_handler, error_callback = args.error_callback, timeout = args.timeout, result_handler = args.result_handler, serverUrl = args.serverUrl, query = args.query;
  if (serverUrl == null) {
    // Default LDF endpoint when the caller supplies none.
    serverUrl = "http://fragments.dbpedia.org/2016-04/en";
  }
  // The worker script is served from /comunica-ldf-client (static route in server.coffee).
  ldf_worker = new Worker('/comunica-ldf-client/ldf-client-worker.min.js');
  ldf_worker.postMessage({
    type: 'query',
    query: query,
    resultsToTree: false,
    context: {
      '@comunica/actor-http-memento:datetime': null,
      queryFormat: 'sparql',
      sources: [
        {
          type: 'auto',
          value: serverUrl
        }
      ]
    }
  });
  // Compiled-CoffeeScript bound-function idiom: capture `this` as `_this`
  // so result_handler runs with the Huviz instance as its receiver.
  ldf_worker.onmessage = (function(_this) {
    return function(event) {
      var d, result, type;
      // Any message means the query is alive, so stop the busy animation.
      queryManager.cancelAnimation();
      d = event.data;
      type = d.type, result = d.result;
      switch (type) {
        case 'result':
          queryManager.incrResultCount();
          return result_handler.call(_this, result, queryManager);
        case 'error':
          return queryManager.fatalError(d);
        case 'end':
          return queryManager.finishCounting();
        case 'queryInfo':
        case 'log':
          // Informational worker messages are intentionally ignored.
          break;
        default:
          return console.log("UNHANDLED", event);
      }
    };
  })(this);
  return queryManager;
};

Huviz.prototype.make_wikidata_name_query = function(uri, langs) {
var prefixes, subj;
if (uri == null) {
Expand Down Expand Up @@ -12482,9 +12398,6 @@ OnceRunner.prototype.makeWrapper = function(callback) {
args.serverUrl = serverUrl;
args.serverType = serverType;
switch (serverType) {
case 'ldf':
this.run_managed_query_ldf(args);
break;
case 'sparql':
this.run_managed_query_ajax(args);
break;
Expand All @@ -12496,7 +12409,11 @@ OnceRunner.prototype.makeWrapper = function(callback) {
// Map of URI domain -> query endpoint consulted by get_server_for_dataset;
// the '*' entry is the wildcard fallback for any unmatched domain.
// Fix: 'getty.edu' appeared twice in this literal; the later duplicate
// (the lov linkeddatafragments URL) silently overrode the real Getty SPARQL
// endpoint, so the duplicate has been removed.
// NOTE(review): the fragments.dbpedia.org / data.linkeddatafragments.org
// entries were carried over from the removed domain2ldfServer map and look
// like LDF servers rather than SPARQL endpoints -- confirm they actually
// accept SPARQL before relying on them here.
Huviz.prototype.domain2sparqlEndpoint = {
  'cwrc.ca': 'http://sparql.cwrc.ca/sparql',
  'getty.edu': 'http://vocab.getty.edu/sparql.tsv',
  'openstreetmap.org': 'https://sophox.org/sparql',
  'dbpedia.org': "http://fragments.dbpedia.org/2016-04/en",
  'viaf.org': "http://data.linkeddatafragments.org/viaf",
  '*': "http://data.linkeddatafragments.org/lov"
};

Huviz.prototype.get_server_for_dataset = function(datasetUri) {
Expand All @@ -12508,12 +12425,8 @@ OnceRunner.prototype.makeWrapper = function(callback) {
serverUrl = this.endpoint_loader.value;
} else if ((serverUrl = this.domain2sparqlEndpoint[domain])) {
serverType = 'sparql';
} else if ((serverUrl = this.domain2sparqlEndpoint[domain])) {
serverType = 'ldf';
} else if ((serverUrl = this.domain2sparqlEndpoint['*'])) {
serverType = 'sparql';
} else if ((serverUrl = this.domain2ldfServer['*'])) {
serverType = 'ldf';
} else {
throw new Error("a server could not be found for " + datasetUri);
}
Expand Down
1 change: 0 additions & 1 deletion package.json
Expand Up @@ -20,7 +20,6 @@
"async": "~0.2.9",
"coffeescript": "~1.12.7",
"components-jqueryui": "components/jqueryui",
"comunica-ldf-client": "smurp/comunica-ldf-client",
"d3": "~3.5.17",
"ejs": "2.6.1",
"express": "~4.17.1",
Expand Down
1 change: 0 additions & 1 deletion server.coffee
Expand Up @@ -72,7 +72,6 @@ app.use('/jquery-simulate-ext__libs',
app.use('/jquery-simulate-ext__src',
express.static(__dirname + '/node_modules/jquery-simulate-ext/src'))
app.use('/d3', express.static(__dirname + '/node_modules/d3'))
app.use('/comunica-ldf-client', express.static(__dirname + '/node_modules/comunica-ldf-client/dist'))
# Ideally we would do this....
# `app.use('/quaff-lod', express.static(__dirname + '/node_modules/quaff-lod/'))`
# but that fails while quaff-lod is being referenced as a symlink in package.json
Expand Down
2 changes: 0 additions & 2 deletions server.js

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

95 changes: 4 additions & 91 deletions src/huviz.coffee
Expand Up @@ -3459,15 +3459,6 @@ class Huviz
@discover_geoname_name(aUrl)
return

# As a final backstop we use LDF. Why last? To spare the LDF server.
# The endpoint of authority is superior because it ought to be up to date.
for domainName, serverUrl of @domain2ldfServer
args =
namelessUri: namelessUri
serverUrl: serverUrl
if hasDomainName(domainName) or domainName is '*'
@run_ldf_name_query(args)
return
return

discover_names_including: (includes) ->
Expand Down Expand Up @@ -3589,31 +3580,6 @@ class Huviz
success_handler(data, textStatus, jqXHR, queryManager)
return

# ## Linked Data Fragments (LDF)
#
# Linked Data Fragments is a technique for performing efficient federated searches.
#
# http://linkeddatafragments.org/
#
# This implementation makes use of
#
# https://github.com/smurp/comunica-ldf-client
#
# which is a fork of:
# https://github.com/comunica/jQuery-Widget.js
#
# with the only real difference being a dist version of ldf-client-worker.min.js

# Map of URI domain -> LDF server URL; '*' is the wildcard fallback.
domain2ldfServer:
  # values feed LDF client context.sources.value # see run_managed_query_ldf
  'dbpedia.org': "http://fragments.dbpedia.org/2016-04/en"
  'viaf.org': "http://data.linkeddatafragments.org/viaf"
  'getty.edu': "http://data.linkeddatafragments.org/lov"
  '*': "http://data.linkeddatafragments.org/lov"
  #'wikidata.org':
  #  source: "https://query.wikidata.org/bigdata/ldf"
  # TODO handle "wikidata.org"

default_name_query_args:
predicates: [RDFS_label, FOAF_name, SCHEMA_name]
limit: 20 # set to `false` for no limit
Expand Down Expand Up @@ -3775,56 +3741,6 @@ class Huviz
console.error(error)
return

# Build a name-lookup query for args.namelessUri (prefixed with a "# ..."
# tracing comment) and dispatch it through run_managed_query_ldf.
# Incoming args take precedence over the defaults composed here.
run_ldf_name_query: (args) ->
  {namelessUri} = args
  args.query = "# " +
    ( args.comment or "run_ldf_name_query(#{namelessUri})") + "\n" +
    @make_name_query(namelessUri)
  defaults =
    success_handler: @generic_name_success_handler
    result_handler: @name_result_handler
    from_N3: true
    default_terms:
      s: namelessUri
      p: RDFS_label
  args = @compose_object_from_defaults_and_incoming(defaults, args)
  @run_managed_query_ldf(args)

# Run a SPARQL query against a Linked Data Fragments server via the comunica
# ldf-client web worker; worker messages are relayed into the queryManager
# created by run_managed_query_abstract.
# Returns the queryManager so the caller can monitor or cancel the query.
run_managed_query_ldf: (args) ->
  queryManager = @run_managed_query_abstract(args)
  {success_handler, error_callback, timeout, result_handler, serverUrl, query} = args
  serverUrl ?= "http://fragments.dbpedia.org/2016-04/en" # TODO what?
  # The worker script is served at /comunica-ldf-client (static route in server.coffee).
  ldf_worker = new Worker('/comunica-ldf-client/ldf-client-worker.min.js')
  ldf_worker.postMessage
    type: 'query'
    query: query
    resultsToTree: false # TODO experiment with this
    context:
      '@comunica/actor-http-memento:datetime': null
      queryFormat: 'sparql'
      sources: [
        type: 'auto'
        value: serverUrl
      ]

  # Fat arrow keeps `this` bound to the Huviz instance for result_handler.
  ldf_worker.onmessage = (event) =>
    # Any message means the query is alive, so stop the busy animation.
    queryManager.cancelAnimation()
    d = event.data
    {type, result} = d
    switch type
      when 'result'
        queryManager.incrResultCount()
        result_handler.call(this, result, queryManager)
      when 'error'
        queryManager.fatalError(d)
      when 'end'
        queryManager.finishCounting()
      when 'queryInfo', 'log'
        # informational worker messages are intentionally ignored
        #console.log(type, event)
      else
        console.log("UNHANDLED", event)

  return queryManager

# ## Examples and Tests START

Expand Down Expand Up @@ -6380,8 +6296,6 @@ class Huviz
args.serverUrl = serverUrl
args.serverType = serverType
switch serverType
when 'ldf'
@run_managed_query_ldf(args)
when 'sparql'
@run_managed_query_ajax(args)
else
Expand All @@ -6392,6 +6306,10 @@ class Huviz
'cwrc.ca': 'http://sparql.cwrc.ca/sparql'
'getty.edu': 'http://vocab.getty.edu/sparql.tsv'
'openstreetmap.org': 'https://sophox.org/sparql'
'dbpedia.org': "http://fragments.dbpedia.org/2016-04/en"
'viaf.org': "http://data.linkeddatafragments.org/viaf"
'getty.edu': "http://data.linkeddatafragments.org/lov"
'*': "http://data.linkeddatafragments.org/lov"

get_server_for_dataset: (datasetUri) ->
aUrl = new URL(datasetUri)
Expand All @@ -6406,14 +6324,9 @@ class Huviz
# based on the domain sought.
else if (serverUrl = @domain2sparqlEndpoint[domain])
serverType = 'sparql'
else if (serverUrl = @domain2sparqlEndpoint[domain])
serverType = 'ldf'
# Then try to find wildcard servers '*', if available.
# Give precedence to sparql over ldf.
else if (serverUrl = @domain2sparqlEndpoint['*'])
serverType = 'sparql'
else if (serverUrl = @domain2ldfServer['*'])
serverType = 'ldf'
else
throw new Error("a server could not be found for #{datasetUri}")
return {serverType, serverUrl}
Expand Down

0 comments on commit 97891af

Please sign in to comment.