From 80a160023466553ed918a8d3402eecb7d7b07614 Mon Sep 17 00:00:00 2001
From: rmorcos
Date: Tue, 29 Aug 2017 11:06:33 -0700
Subject: [PATCH 1/3] Creating inference examples

Contains test examples for inference.
---
 .../inferenceExamples_readme.txt                   |  47 ++
 .../src/main/java/InferenceExamples.java           | 592 ++++++++++++++++++
 .../src/main/java/log4j.properties                 |  12 +
 3 files changed, 651 insertions(+)
 create mode 100644 extras/indexingExample/inferenceExamples_readme.txt
 create mode 100644 extras/indexingExample/src/main/java/InferenceExamples.java
 create mode 100644 extras/indexingExample/src/main/java/log4j.properties

diff --git a/extras/indexingExample/inferenceExamples_readme.txt b/extras/indexingExample/inferenceExamples_readme.txt
new file mode 100644
index 000000000..ead5f1b5e
--- /dev/null
+++ b/extras/indexingExample/inferenceExamples_readme.txt
@@ -0,0 +1,47 @@
+
+USAGE FOR InferenceExamples.java
+
+----------------
+Notes on MongoDB setup:
+
+These notes apply if you are using single-instance MongoDB mode.
+
+If this is the first time you have installed MongoDB, you will need to create a rya database.
+You will also need to create a username and password for the rya database.
+After logging into MongoDB, you can use the commands below to accomplish this.
+
+use rya
+db.createUser({user:"urya",pwd:"urya",roles:[{role:"readWrite",db:"rya"}]})
+
+----------------
+
+Notes on how to use embedded and single-instance modes.
+
+To use embedded mode, set USE_EMBEDDED_MONGO to true.
+
+To use single-instance mode, set USE_EMBEDDED_MONGO to false.
+Also set the MongoUserName and MongoUserPassword constants.
+
+
+----------------
+
+Notes on errors in single-instance mode.
+
+If you have run InferenceExamples more than once in single-instance mode, you may get an error.
+This is because the tables have already been created in MongoDB.
+Log in with the mongo client and run the commands below to remove the tables.
+Afterwards, listing the tables with "show tables" should show none.
+You may then run the examples again.
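+(The collection names below assume the defaults in InferenceExamples.java,
+MONGO_DB "rya" and MONGO_COLL_PREFIX "rya_"; if you changed either constant,
+adjust the collection names to match.)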
+ +use rya +show tables +db.rya__triples.drop() +db.rya_rya_freetext.drop() +db.rya_rya_temporal.drop() +exit + + +---------------- diff --git a/extras/indexingExample/src/main/java/InferenceExamples.java b/extras/indexingExample/src/main/java/InferenceExamples.java new file mode 100644 index 000000000..a973429d7 --- /dev/null +++ b/extras/indexingExample/src/main/java/InferenceExamples.java @@ -0,0 +1,592 @@ +import java.io.IOException; +import java.util.List; + +import org.apache.commons.lang.Validate; +import org.apache.hadoop.conf.Configuration; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.LogManager; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.apache.rya.indexing.accumulo.ConfigUtils; +import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration; +import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder; +import org.apache.rya.mongodb.MockMongoFactory; +import org.apache.rya.mongodb.MongoConnectorFactory; +import org.apache.rya.rdftriplestore.RdfCloudTripleStore; +import org.apache.rya.rdftriplestore.inference.InferenceEngineException; +import org.apache.rya.sail.config.RyaSailFactory; +import org.apache.zookeeper.ClientCnxn; +import org.openrdf.model.Namespace; +import org.openrdf.model.URI; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.OWL; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.BindingSet; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryEvaluationException; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.QueryResultHandlerException; +import org.openrdf.query.TupleQuery; +import org.openrdf.query.TupleQueryResultHandler; +import org.openrdf.query.TupleQueryResultHandlerException; +import org.openrdf.query.Update; +import org.openrdf.query.UpdateExecutionException; +import org.openrdf.repository.RepositoryException; +import org.openrdf.repository.RepositoryResult; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.sail.SailRepositoryConnection; +import org.openrdf.sail.Sail; + +import com.mongodb.MongoClient; +import com.mongodb.ServerAddress; + + +// +//See notes in inferenceExamples_readme.txt +// + +public class InferenceExamples { + private static final Logger log = Logger.getLogger(InferenceExamples.class); + + private static final boolean IS_DETAILED_LOGGING_ENABLED = false; + + // + // Connection configuration parameters + // + + private static final boolean PRINT_QUERIES = true; + private static final String MONGO_DB = "rya"; + private static final String MONGO_COLL_PREFIX = "rya_"; + private static final boolean USE_EMBEDDED_MONGO = true; + private static final String MONGO_INSTANCE_URL = "localhost"; + private static final String MONGO_INSTANCE_PORT = "27017"; + private static final String MongoUserName="usern"; + private static final String MongoUserPassword="passwd"; + + public static void setupLogging() { + final Logger rootLogger = LogManager.getRootLogger(); + rootLogger.setLevel(Level.OFF); + final ConsoleAppender ca = (ConsoleAppender) rootLogger.getAppender("stdout"); + ca.setLayout(new PatternLayout("%d{MMM dd yyyy HH:mm:ss} %5p [%t] (%F:%L) - %m%n")); + rootLogger.setLevel(Level.INFO); + // Filter out noisy messages from the following classes. 
+        Logger.getLogger(ClientCnxn.class).setLevel(Level.OFF);
+        Logger.getLogger(MockMongoFactory.class).setLevel(Level.OFF);
+    }
+
+    public static void main(final String[] args) throws Exception {
+        if (IS_DETAILED_LOGGING_ENABLED) {
+            setupLogging();
+        }
+        final Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            final Sail sail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(sail);
+            conn = repository.getConnection();
+
+
+            final long start = System.currentTimeMillis();
+
+            testInfer(conn, sail);
+            testPropertyChainInference(conn, sail);
+            testPropertyChainInferenceAltRepresentation(conn, sail);
+            testSomeValuesFromInference(conn, sail);
+            testAllValuesFromInference(conn, sail);
+            testIntersectionOfInference(conn, sail);
+            testOneOfInference(conn, sail);
+
+            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+            MongoConnectorFactory.closeMongoClient();
+        }
+    }
+
+    private static void closeQuietly(final SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (final RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static void closeQuietly(final SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (final RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static Configuration getConf() throws IOException {
+
+        // MongoDBIndexingConfigBuilder builder = MongoIndexingConfiguration.builder()
+        //     .setUseMockMongo(USE_MOCK).setUseInference(USE_INFER).setAuths("U");
+        MongoDBIndexingConfigBuilder builder = MongoIndexingConfiguration.builder()
+            .setUseMockMongo(USE_EMBEDDED_MONGO).setUseInference(true).setAuths("U");
+
+        if (USE_EMBEDDED_MONGO) {
+            final MongoClient c = MockMongoFactory.newFactory().newMongoClient();
+            final ServerAddress address = c.getAddress();
+            final String url = address.getHost();
+            final String port = Integer.toString(address.getPort());
+            c.close();
+            builder.setMongoHost(url).setMongoPort(port);
+        } else {
+            // User name and password must be filled in:
+            builder = builder.setMongoUser(MongoUserName)
+                    .setMongoPassword(MongoUserPassword)
+                    .setMongoHost(MONGO_INSTANCE_URL)
+                    .setMongoPort(MONGO_INSTANCE_PORT);
+        }
+
+        return builder.setMongoDBName(MONGO_DB)
+                .setMongoCollectionPrefix(MONGO_COLL_PREFIX)
+                .setUseMongoFreetextIndex(true)
+                .setMongoFreeTextPredicates(RDFS.LABEL.stringValue()).build();
+
+    }
+
+
+    public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . "
+                + "  <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . "
+                + "  <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . "
+                + "  <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . "
+                + "  <urn:jenMother> <urn:Motherof> <urn:jen> . "
+                + "  <urn:jen> <urn:isChildOf> <urn:jenMother> . "
+                + "  <urn:jen> <urn:Motherof> <urn:jenDaughter> . }}";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+
+        // try adding a property chain and querying for it
+        query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <urn:greatMother> owl:propertyChainAxiom <urn:12342> . "
+                + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . "
+                + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . "
+                + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . "
+                + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:Motherof> . "
+                + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . }}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+
+        resultHandler.resetCount();
+        query = "select ?x { GRAPH <http://updated/test> { <urn:jenGreatGranMother> <urn:greatMother> ?x}}";
+        resultHandler = new CountingResultHandler();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+    }
+
+    public static void testPropertyChainInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <urn:paulGreatGrandfather> <urn:father> <urn:paulGrandfather> . "
+                + "  <urn:paulGrandfather> <urn:father> <urn:paulFather> . "
+                + "  <urn:paulFather> <urn:father> <urn:paul> . "
+                + "  <urn:paul> <urn:father> <urn:paulSon> . }}";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?p { GRAPH <http://updated/test> { <urn:paulGreatGrandfather> <urn:father>/<urn:father> ?p}}";
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+
+        // try adding a property chain and querying for it
+        query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <urn:greatGrandfather> owl:propertyChainAxiom <urn:1234> . "
+                + " <urn:1234> <http://www.w3.org/2000/10/swap/list#length> 3 . "
+                + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . "
+                + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) . "
+                + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (2 <urn:father>) . }}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+        query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <urn:grandfather> owl:propertyChainAxiom <urn:12344> . "
+                + " <urn:12344> <http://www.w3.org/2000/10/swap/list#length> 2 . "
+                + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . "
+                + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) . }}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+
+        resultHandler.resetCount();
+        query = "select ?p { GRAPH <http://updated/test> { <urn:paulGreatGrandfather> <urn:greatGrandfather> ?p}}";
+        resultHandler = new CountingResultHandler();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        resultHandler.resetCount();
+        query = "select ?s ?p { GRAPH <http://updated/test> {?s <urn:grandfather> ?p}}";
+        resultHandler = new CountingResultHandler();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+    }
+
+    public static void testIntersectionOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+        log.info("Adding Data");
+        final String instances = "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <urn:Susan> a <urn:Mother> . \n"
+                + "  <urn:Mary> a <urn:Woman> . \n"
+                + "  <urn:Mary> a <urn:Parent> . \n"
+                + "}}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
+        update.execute();
+        final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Mother> }}";
+        final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n"
+                + "  { ?x a <urn:Mother> }\n"
+                + "  UNION {\n"
+                + "    ?x a <urn:Woman> .\n"
+                + "    ?x a <urn:Parent> .\n"
+                + "  }\n"
+                + "}}";
+        log.info("Running Explicit Query");
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 2);
+        log.info("Running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+        log.info("Adding owl:intersectionOf Schema");
+        // ONTOLOGY - :Mother intersectionOf[:Woman, :Parent]
+        final String ontology = "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <urn:Mother> owl:intersectionOf _:bnode1 . \n"
+                + "  _:bnode1 rdf:first <urn:Woman> . \n"
+                + "  _:bnode1 rdf:rest _:bnode2 . \n"
+                + "  _:bnode2 rdf:first <urn:Parent> . \n"
+                + "  _:bnode2 rdf:rest rdf:nil . \n"
+                + "}}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
+        update.execute();
+        log.info("Refreshing InferenceEngine");
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+        log.info("Re-running Inference-dependent Query");
+        resultHandler.resetCount();
+        resultHandler = new CountingResultHandler();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 2);
+    }
+
+    public static void testSomeValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+        final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
+        log.info("Adding Data");
+        String insert = "PREFIX lubm: <" + lubm + ">\n"
+                + "INSERT DATA { GRAPH <http://updated/test> {\n"
+                + "  <urn:CS> a lubm:Department; lubm:subOrganizationOf <urn:Univ> .\n"
+                + "  <urn:CSGradGroup> a lubm:ResearchGroup; lubm:subOrganizationOf <urn:CS> .\n"
+                + "  <urn:Alice> lubm:headOf <urn:CS> .\n"
+                + "  <urn:Bob> lubm:headOf <urn:CSGradGroup> .\n"
+                + "  <urn:Carol> lubm:worksFor <urn:CS> .\n"
+                + "}}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <" + lubm + "Chair> }}";
+        final String explicitQuery = "prefix lubm: <" + lubm + ">\n"
+                + "select distinct ?x { GRAPH <http://updated/test> {\n"
+                + "  { ?x a lubm:Chair }\n"
+                + "  UNION\n"
+                + "  { ?x lubm:headOf [ a lubm:Department ] }\n"
+                + "}}";
+        log.info("Running Explicit Query");
+        final CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+        log.info("Running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 0);
+        log.info("Adding owl:someValuesFrom Schema");
+        insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
+                + "PREFIX owl: <" + OWL.NAMESPACE + ">\n"
+                + "PREFIX lubm: <" + lubm + ">\n"
+                + "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  lubm:Chair owl:equivalentClass [ owl:onProperty lubm:headOf ; owl:someValuesFrom lubm:Department ] ."
+                + "}}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        log.info("Refreshing InferenceEngine");
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+        log.info("Re-running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+    }
+
+    public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+        log.info("Adding Data");
+        String insert = "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <urn:Alice> a <urn:Person> .\n"
+                + "  <urn:Alice> <urn:hasParent> <urn:Bob> .\n"
+                + "  <urn:Carol> <urn:hasParent> <urn:Dan> .\n"
+                + "}}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Person> }}";
+        final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n"
+                + "  { ?x a <urn:Person> }\n"
+                + "  UNION {\n"
+                + "    ?y a <urn:Person> .\n"
+                + "    ?y <urn:hasParent> ?x .\n"
+                + "  }\n"
+                + "}}";
+        log.info("Running Explicit Query");
+        final CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 2);
+        log.info("Running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 1);
+        log.info("Adding owl:allValuesFrom Schema");
+        insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
+                + "PREFIX owl: <" + OWL.NAMESPACE + ">\n"
+                + "INSERT DATA\n"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <urn:Person> rdfs:subClassOf [ owl:onProperty <urn:hasParent> ; owl:allValuesFrom <urn:Person> ] ."
+                + "}}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+        log.info("Refreshing InferenceEngine");
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+        log.info("Re-running Inference-dependent Query");
+        resultHandler.resetCount();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 2);
+    }
+
+    public static void testOneOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+        log.info("Adding Data");
+        final String instances = "INSERT DATA"
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <urn:FlopCard1> a <urn:Card> . \n"
+                + "    <urn:FlopCard1> <urn:HasRank> <urn:Ace> . \n"
+                + "    <urn:FlopCard1> <urn:HasSuit> <urn:Diamonds> . \n"
+                + "  <urn:FlopCard2> a <urn:Card> . \n"
+                + "    <urn:FlopCard2> <urn:HasRank> <urn:King> . \n"
+                + "    <urn:FlopCard2> <urn:HasSuit> <urn:Hearts> . \n"
+                + "  <urn:FlopCard3> a <urn:Card> . \n"
+                + "    <urn:FlopCard3> <urn:HasRank> <urn:Queen> . \n"
+                + "    <urn:FlopCard3> <urn:HasSuit> <urn:Spades> . \n"
+                + "  <urn:TurnCard> a <urn:Card> . \n"
+                + "    <urn:TurnCard> <urn:HasRank> <urn:10> . \n"
+                + "    <urn:TurnCard> <urn:HasSuit> <urn:Clubs> . \n"
+                + "  <urn:RiverCard> a <urn:Card> . \n"
+                + "    <urn:RiverCard> <urn:HasRank> <urn:Ace> . \n"
+                + "    <urn:RiverCard> <urn:HasSuit> <urn:Hearts> . \n"
+                + "}}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
+        update.execute();
+        final String explicitQuery = "select distinct ?card { GRAPH <http://updated/test> {\n"
+                + "  ?card a <urn:Card> . \n"
+                + "  VALUES ?suit { <urn:Clubs> <urn:Diamonds> <urn:Hearts> <urn:Spades> } . \n"
+                + "  ?card <urn:HasSuit> ?suit . \n"
+                + "}}";
+        log.info("Running Explicit Query");
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 5);
+        log.info("Adding owl:oneOf Schema");
+        // ONTOLOGY - :Suits oneOf (:Clubs, :Diamonds, :Hearts, :Spades)
+        // ONTOLOGY - :Ranks oneOf (:Ace, :2, :3, :4, :5, :6, :7, :8, :9, :10, :Jack, :Queen, :King)
+        final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
+                + "  <urn:Suits> owl:oneOf _:bnodeS1 . \n"
+                + "  _:bnodeS1 rdf:first <urn:Clubs> . \n"
+                + "  _:bnodeS1 rdf:rest _:bnodeS2 . \n"
+                + "  _:bnodeS2 rdf:first <urn:Diamonds> . \n"
+                + "  _:bnodeS2 rdf:rest _:bnodeS3 . \n"
+                + "  _:bnodeS3 rdf:first <urn:Hearts> . \n"
+                + "  _:bnodeS3 rdf:rest _:bnodeS4 . \n"
+                + "  _:bnodeS4 rdf:first <urn:Spades> . \n"
+                + "  _:bnodeS4 rdf:rest rdf:nil . \n"
+                + "  <urn:Ranks> owl:oneOf _:bnodeR1 . \n"
+                + "  _:bnodeR1 rdf:first <urn:Ace> . \n"
+                + "  _:bnodeR1 rdf:rest _:bnodeR2 . \n"
+                + "  _:bnodeR2 rdf:first <urn:2> . \n"
+                + "  _:bnodeR2 rdf:rest _:bnodeR3 . \n"
+                + "  _:bnodeR3 rdf:first <urn:3> . \n"
+                + "  _:bnodeR3 rdf:rest _:bnodeR4 . \n"
+                + "  _:bnodeR4 rdf:first <urn:4> . \n"
+                + "  _:bnodeR4 rdf:rest _:bnodeR5 . \n"
+                + "  _:bnodeR5 rdf:first <urn:5> . \n"
+                + "  _:bnodeR5 rdf:rest _:bnodeR6 . \n"
+                + "  _:bnodeR6 rdf:first <urn:6> . \n"
+                + "  _:bnodeR6 rdf:rest _:bnodeR7 . \n"
+                + "  _:bnodeR7 rdf:first <urn:7> . \n"
+                + "  _:bnodeR7 rdf:rest _:bnodeR8 . \n"
+                + "  _:bnodeR8 rdf:first <urn:8> . \n"
+                + "  _:bnodeR8 rdf:rest _:bnodeR9 . \n"
+                + "  _:bnodeR9 rdf:first <urn:9> . \n"
+                + "  _:bnodeR9 rdf:rest _:bnodeR10 . \n"
+                + "  _:bnodeR10 rdf:first <urn:10> . \n"
+                + "  _:bnodeR10 rdf:rest _:bnodeR11 . \n"
+                + "  _:bnodeR11 rdf:first <urn:Jack> . \n"
+                + "  _:bnodeR11 rdf:rest _:bnodeR12 . \n"
+                + "  _:bnodeR12 rdf:first <urn:Queen> . \n"
+                + "  _:bnodeR12 rdf:rest _:bnodeR13 . \n"
+                + "  _:bnodeR13 rdf:first <urn:King> . \n"
+                + "  _:bnodeR13 rdf:rest rdf:nil . \n"
+                + "  <urn:Card> owl:intersectionOf (\n"
+                + "    [ owl:onProperty <urn:HasRank> ; owl:someValuesFrom <urn:Ranks> ]\n"
+                + "    [ owl:onProperty <urn:HasSuit> ; owl:someValuesFrom <urn:Suits> ]\n"
+                + "  ) . \n"
+                + "  <urn:HasRank> owl:range <urn:Ranks> . \n"
+                + "  <urn:HasSuit> owl:range <urn:Suits> . \n"
+                + "}}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
+        update.execute();
+        log.info("Running Inference-dependent Query without refreshing InferenceEngine");
+        resultHandler.resetCount();
+        final String inferQuery = "select distinct ?card { GRAPH <http://updated/test> {\n"
+                + "  ?card a <urn:Card> . \n"
+                + "  ?suit a <urn:Suits> . \n"
+                + "  ?card <urn:HasSuit> ?suit . \n"
+                + "}}";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 0);
+        log.info("Refreshing InferenceEngine");
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+        log.info("Re-running Inference-dependent Query");
+        resultHandler.resetCount();
+        resultHandler = new CountingResultHandler();
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+        Validate.isTrue(resultHandler.getCount() == 5);
+    }
+
+    public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ \n"//
+                + "  <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1> . "
+                + "  <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass> . }";
+
+        log.info("Performing Query");
+
+        final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        // refresh the graph for inferencing (otherwise there is a five minute wait)
+        ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
+
+        query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
+        final CountingResultHandler resultHandler = new CountingResultHandler();
+        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 1);
+
+        resultHandler.resetCount();
+    }
+
+    private static class CountingResultHandler implements TupleQueryResultHandler {
+        private int count = 0;
+
+        public int getCount() {
+            return count;
+        }
+
+        public void resetCount() {
+            count = 0;
+        }
+
+        @Override
+        public void startQueryResult(final List<String> arg0) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(final BindingSet arg0) throws TupleQueryResultHandlerException {
+            count++;
+            System.out.println(arg0);
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(final boolean arg0) throws QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(final List<String> arg0) throws QueryResultHandlerException {
+        }
+    }
+}
diff --git a/extras/indexingExample/src/main/java/log4j.properties b/extras/indexingExample/src/main/java/log4j.properties
new file mode 100644
index 000000000..b7347ead5
--- /dev/null
+++ b/extras/indexingExample/src/main/java/log4j.properties
@@ -0,0 +1,12 @@
+# Set root logger level to INFO and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
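+# In the pattern below: %-4r is milliseconds since startup, [%t] the thread,
+# %-5p the level, %c the logger name, %x the NDC, and %m%n the message plus newline.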
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+

From eb77a6e65f60efed7b5453f1bb253d3ff4206804 Mon Sep 17 00:00:00 2001
From: rmorcos
Date: Wed, 6 Sep 2017 19:26:52 -0700
Subject: [PATCH 2/3] Removed readme from root of indexingExample
 inferenceExamples

---
 .../inferenceExamples_readme.txt                   | 47 -------------------
 1 file changed, 47 deletions(-)
 delete mode 100644 extras/indexingExample/inferenceExamples_readme.txt

diff --git a/extras/indexingExample/inferenceExamples_readme.txt b/extras/indexingExample/inferenceExamples_readme.txt
deleted file mode 100644
index ead5f1b5e..000000000
--- a/extras/indexingExample/inferenceExamples_readme.txt
+++ /dev/null
@@ -1,47 +0,0 @@
-
-USAGE FOR InferenceExamples.java
-
-----------------
-Notes on MongoDB setup:
-
-These notes apply if you are using single-instance MongoDB mode.
-
-If this is the first time you have installed MongoDB, you will need to create a rya database.
-You will also need to create a username and password for the rya database.
-After logging into MongoDB, you can use the commands below to accomplish this.
-
-use rya
-db.createUser({user:"urya",pwd:"urya",roles:[{role:"readWrite",db:"rya"}]})
-
-----------------
-
-Notes on how to use embedded and single-instance modes.
-
-To use embedded mode, set USE_EMBEDDED_MONGO to true.
-
-To use single-instance mode, set USE_EMBEDDED_MONGO to false.
-Also set the MongoUserName and MongoUserPassword constants.
-
-
-----------------
-
-Notes on errors in single-instance mode.
-
-If you have run InferenceExamples more than once in single-instance mode, you may get an error.
-This is because the tables have already been created in MongoDB.
-Log in with the mongo client and run the commands below to remove the tables.
-Afterwards, listing the tables with "show tables" should show none.
-You may then run the examples again.
-(The collection names below assume the defaults in InferenceExamples.java,
-MONGO_DB "rya" and MONGO_COLL_PREFIX "rya_"; if you changed either constant,
-adjust the collection names to match.)
-
-use rya
-show tables
-db.rya__triples.drop()
-db.rya_rya_freetext.drop()
-db.rya_rya_temporal.drop()
-exit
-
-
-----------------

From fbe51fa06aafa744db5342bc6b1847b3d9444b7f Mon Sep 17 00:00:00 2001
From: rmorcos
Date: Tue, 26 Sep 2017 18:14:06 -0700
Subject: [PATCH 3/3] Fixed requested changes to inference examples

---
 extras/indexingExample/src/main/java/InferenceExamples.java | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/extras/indexingExample/src/main/java/InferenceExamples.java b/extras/indexingExample/src/main/java/InferenceExamples.java
index a973429d7..4d232f12a 100644
--- a/extras/indexingExample/src/main/java/InferenceExamples.java
+++ b/extras/indexingExample/src/main/java/InferenceExamples.java
@@ -43,9 +43,6 @@
 import com.mongodb.ServerAddress;
 
 
-//
-//See notes in inferenceExamples_readme.txt
-//
 
 public class InferenceExamples {
     private static final Logger log = Logger.getLogger(InferenceExamples.class);
@@ -67,7 +64,6 @@ public class InferenceExamples {
     public static void setupLogging() {
         final Logger rootLogger = LogManager.getRootLogger();
-        rootLogger.setLevel(Level.OFF);
         final ConsoleAppender ca = (ConsoleAppender) rootLogger.getAppender("stdout");
         ca.setLayout(new PatternLayout("%d{MMM dd yyyy HH:mm:ss} %5p [%t] (%F:%L) - %m%n"));
         rootLogger.setLevel(Level.INFO);