From c8a40e73d458911fc8d5265baa48c2c865aa3eee Mon Sep 17 00:00:00 2001 From: Caleb Meier Date: Mon, 7 Aug 2017 21:22:00 -0700 Subject: [PATCH] RYA-319-Integration of Periodic Query with CLI --- .../org/apache/rya/api/client/CreatePCJ.java | 4 +- .../rya/api/client/CreatePeriodicPCJ.java | 40 +++ .../rya/api/client/DeletePeriodicPCJ.java | 38 +++ .../api/client/ListIncrementalQueries.java | 38 +++ .../org/apache/rya/api/client/RyaClient.java | 31 +++ extras/indexing/pom.xml | 5 +- .../accumulo/AccumuloCreatePeriodicPCJ.java | 145 +++++++++++ .../accumulo/AccumuloDeletePeriodicPCJ.java | 135 +++++++++++ .../AccumuloListIncrementalQueries.java | 101 ++++++++ .../accumulo/AccumuloRyaClientFactory.java | 3 + extras/rya.pcj.fluo/pcj.fluo.api/pom.xml | 4 + .../indexing/pcj/fluo/api/CreateFluoPcj.java | 24 +- .../pcj/fluo/api/CreatePeriodicQuery.java | 215 +++++++++++++++++ .../pcj/fluo/api/DeletePeriodicQuery.java | 92 +++++++ .../pcj/fluo/api/ListFluoQueries.java | 149 ++++++++++++ .../fluo/app/IncrementalUpdateConstants.java | 1 + .../pcj/fluo/app/export/ExporterManager.java | 23 +- .../pcj/fluo/app/export/NoOpExporter.java | 59 ----- .../KafkaBindingSetExporterParameters.java | 1 - .../rya/PeriodicBindingSetExporter.java | 2 +- .../app/observers/QueryResultObserver.java | 4 +- .../pcj/fluo/app/query/FluoQuery.java | 6 +- .../pcj/fluo/app/query/FluoQueryColumns.java | 1 + .../fluo/app/query/FluoQueryMetadataDAO.java | 6 +- .../pcj/fluo/api/ListFluoQueriesIT.java | 96 ++++++++ .../pcj/fluo/integration/BatchIT.java | 12 +- .../pcj/fluo/integration/CreateDeleteIT.java | 3 +- .../integration/CreateDeletePeriodicPCJ.java | 227 ++++++++++++++++++ .../pcj/fluo/integration/KafkaExportIT.java | 24 +- .../pcj/fluo/integration/QueryIT.java | 40 ++- .../pcj/fluo/test/base/KafkaExportITBase.java | 17 +- .../periodic.service.api/.gitignore | 1 + .../periodic.service.api/pom.xml | 52 ++++ .../periodic/notification/api/BinPruner.java | 2 +- .../notification/api/BindingSetExporter.java | 5 +- .../notification/api}/BindingSetRecord.java | 2 +- .../api/BindingSetRecordExportException.java | 45 ++++ .../periodic/notification/api/LifeCycle.java | 0 .../periodic/notification/api/NodeBin.java | 0 .../notification/api/Notification.java | 0 .../api/NotificationCoordinatorExecutor.java | 0 .../api/NotificationProcessor.java | 0 .../api/PeriodicNotificationClient.java | 0 .../notification/BasicNotification.java | 0 .../notification/CommandNotification.java | 0 .../notification/PeriodicNotification.java | 0 .../notification/TimestampedNotification.java | 0 .../KafkaNotificationRegistrationClient.java | 2 +- .../BasicNotificationTypeAdapter.java | 0 .../serialization/BindingSetSerDe.java | 0 .../CommandNotificationSerializer.java | 0 .../CommandNotificationTypeAdapter.java | 0 .../PeriodicNotificationTypeAdapter.java | 0 .../pom.xml | 29 +-- .../PeriodicNotificationApplicationIT.java | 102 ++++---- .../PeriodicNotificationProviderIT.java | 5 +- .../PeriodicNotificationExporterIT.java | 1 + .../PeriodicNotificationProcessorIT.java | 2 +- .../PeriodicNotificationBinPrunerIT.java | 7 +- ...PeriodicCommandNotificationConsumerIT.java | 31 ++- .../periodic.service.notification/pom.xml | 201 ++++++++-------- .../notification/api/CreatePeriodicQuery.java | 124 ---------- .../PeriodicNotificationApplication.java | 2 +- ...eriodicNotificationApplicationFactory.java | 2 +- .../exporter/KafkaExporterExecutor.java | 1 + .../KafkaPeriodicBindingSetExporter.java | 9 +- .../NotificationProcessorExecutor.java | 2 +- 
.../TimestampedNotificationProcessor.java | 2 +- extras/rya.periodic.service/pom.xml | 1 + .../apache/rya/shell/RyaAdminCommands.java | 81 ++++++- .../rya/shell/RyaAdminCommandsTest.java | 65 +++++ pom.xml | 20 ++ 72 files changed, 1881 insertions(+), 461 deletions(-) create mode 100644 common/rya.api/src/main/java/org/apache/rya/api/client/CreatePeriodicPCJ.java create mode 100644 common/rya.api/src/main/java/org/apache/rya/api/client/DeletePeriodicPCJ.java create mode 100644 common/rya.api/src/main/java/org/apache/rya/api/client/ListIncrementalQueries.java create mode 100644 extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePeriodicPCJ.java create mode 100644 extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePeriodicPCJ.java create mode 100644 extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloListIncrementalQueries.java create mode 100644 extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreatePeriodicQuery.java create mode 100644 extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeletePeriodicQuery.java create mode 100644 extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueries.java delete mode 100644 extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java create mode 100644 extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueriesIT.java create mode 100644 extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeletePeriodicPCJ.java create mode 100644 extras/rya.periodic.service/periodic.service.api/.gitignore create mode 100644 extras/rya.periodic.service/periodic.service.api/pom.xml rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java (95%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java (86%) rename extras/rya.periodic.service/{periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter => periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api}/BindingSetRecord.java (97%) create mode 100644 extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecordExportException.java rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/LifeCycle.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/NodeBin.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/Notification.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/NotificationCoordinatorExecutor.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/NotificationProcessor.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => 
periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/api/PeriodicNotificationClient.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/notification/BasicNotification.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/notification/CommandNotification.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/notification/PeriodicNotification.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/notification/TimestampedNotification.java (100%) rename extras/rya.periodic.service/{periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/registration/kafka => periodic.service.api/src/main/java/org/apache/rya/periodic/notification/registration}/KafkaNotificationRegistrationClient.java (97%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/serialization/BasicNotificationTypeAdapter.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationSerializer.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationTypeAdapter.java (100%) rename extras/rya.periodic.service/{periodic.service.notification => periodic.service.api}/src/main/java/org/apache/rya/periodic/notification/serialization/PeriodicNotificationTypeAdapter.java (100%) delete mode 100644 extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java index 6e92b28e8..3c369d83d 100644 --- a/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java +++ b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java @@ -28,7 +28,7 @@ */ @DefaultAnnotation(NonNull.class) public interface CreatePCJ { - + /** * Metadata enum used to indicate the type of query that is registered. If * the topmost node is a Construct QueryNode, then the type is Construct. If the @@ -44,7 +44,7 @@ public static enum QueryType{CONSTRUCT, PROJECTION, PERIODIC}; * Application. * */ - public static enum ExportStrategy{RYA, KAFKA, NO_OP_EXPORT}; + public static enum ExportStrategy{RYA, KAFKA, PERIODIC}; /** diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePeriodicPCJ.java b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePeriodicPCJ.java new file mode 100644 index 000000000..7c006d02b --- /dev/null +++ b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePeriodicPCJ.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.api.client; + +/** + * This class creates new PeriodicPCJ for a given Rya instance. + */ +public interface CreatePeriodicPCJ { + + /** + * Creates a new PeriodicPCJ for a given Rya instance. The provided periodicTopic and bootStrapServers are used for + * registering new PeriodiNotifications with the underlying notification registration service. Typically, the + * bootStrapServers are the IP for the KafkaBrokers. + * + * @param instanceName - Rya instance to connect to + * @param sparql - SPARQL query registered with the Periodic Service + * @param periodicTopic - Kafka topic that new PeriodicNotifications are exported to for registration with the + * PeriodicService + * @param bootStrapServers - Connection string for Kafka brokers + * @return Fluo Query Id of the registered Periodic Query + */ + public String createPeriodicPCJ(String instanceName, String sparql, String periodicTopic, String bootStrapServers) throws RyaClientException; + +} diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/DeletePeriodicPCJ.java b/common/rya.api/src/main/java/org/apache/rya/api/client/DeletePeriodicPCJ.java new file mode 100644 index 000000000..c30afd2aa --- /dev/null +++ b/common/rya.api/src/main/java/org/apache/rya/api/client/DeletePeriodicPCJ.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.api.client; + +/** + * Deletes and instance of a Periodic PCJ from Rya + */ +public interface DeletePeriodicPCJ { + + /** + * Deletes a PCJ from an instance of Rya. + * + * @param instanceName - Indicates which Rya instance is maintaining the Periodic PCJ. (not null) + * @param pcjId - The ID of the Periodic PCJ that will be deleted. (not null) + * @param topic - Kafka topic for deleteing PeriodicNotifications + * @param brokers - Comma delimited host/port pairs for connecting to Kafka brokers. + * @throws InstanceDoesNotExistException No instance of Rya exists for the provided name. + * @throws RyaClientException Something caused the command to fail. 
+ */ + public void deletePeriodicPCJ(String instanceName, final String pcjId, String topic, String brokers) throws InstanceDoesNotExistException, RyaClientException; + +} diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/ListIncrementalQueries.java b/common/rya.api/src/main/java/org/apache/rya/api/client/ListIncrementalQueries.java new file mode 100644 index 000000000..75e129724 --- /dev/null +++ b/common/rya.api/src/main/java/org/apache/rya/api/client/ListIncrementalQueries.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.api.client; + +/** + * Verifies that Rya instance has Fluo application enabled and lists + * all SPARQL queries maintained by the applcation. + */ +public interface ListIncrementalQueries { + + /** + * Lists all SPARQL queries maintained by the Fluo Application for a given rya instance and associated information, + * including the Fluo Query Id, the QueryType, the ExportStrategy, and the pretty-printed SPARQL query. 
+ * + * @param ryaInstance - Rya instance whose queries are incrementally maintained by Fluo + * @return String comprised of new line delimited Strings that provide information about each query registered in + * Fluo, including the query Id, the query type, the export strategies, and the SPARQL query + * @throws RyaClientException + */ + public String listIncrementalQueries(String ryaInstance) throws RyaClientException; + +} diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/RyaClient.java b/common/rya.api/src/main/java/org/apache/rya/api/client/RyaClient.java index d1481dc14..c04bd86d6 100644 --- a/common/rya.api/src/main/java/org/apache/rya/api/client/RyaClient.java +++ b/common/rya.api/src/main/java/org/apache/rya/api/client/RyaClient.java @@ -34,6 +34,9 @@ public class RyaClient { private final Install install; private final CreatePCJ createPcj; private final DeletePCJ deletePcj; + private final CreatePeriodicPCJ createPeriodicPcj; + private final DeletePeriodicPCJ deletePeriodicPcj; + private final ListIncrementalQueries listIncrementalQueries; private final BatchUpdatePCJ bactchUpdatePCJ; private final GetInstanceDetails getInstanceDetails; private final InstanceExists instanceExists; @@ -51,6 +54,9 @@ public RyaClient( final Install install, final CreatePCJ createPcj, final DeletePCJ deletePcj, + final CreatePeriodicPCJ createPeriodicPcj, + final DeletePeriodicPCJ deletePeriodicPcj, + final ListIncrementalQueries listIncrementalQueries, final BatchUpdatePCJ batchUpdatePcj, final GetInstanceDetails getInstanceDetails, final InstanceExists instanceExists, @@ -63,6 +69,9 @@ public RyaClient( this.install = requireNonNull(install); this.createPcj = requireNonNull(createPcj); this.deletePcj = requireNonNull(deletePcj); + this.createPeriodicPcj = createPeriodicPcj; + this.deletePeriodicPcj = deletePeriodicPcj; + this.listIncrementalQueries = listIncrementalQueries; this.bactchUpdatePCJ = requireNonNull(batchUpdatePcj); this.getInstanceDetails = requireNonNull(getInstanceDetails); this.instanceExists = requireNonNull(instanceExists); @@ -96,7 +105,29 @@ public CreatePCJ getCreatePCJ() { public DeletePCJ getDeletePCJ() { return deletePcj; } + + /** + * @return An instance of {@link CreatePeridodicPCJ} that is connected to a Rya Periodic Storage + */ + public CreatePeriodicPCJ getCreatePeriodicPCJ() { + return createPeriodicPcj; + } + /** + * @return An instance of {@link DeletePeriodicPCJ} that is connected to a Rya Periodic Storage + */ + public DeletePeriodicPCJ getDeletePeriodicPCJ() { + return deletePeriodicPcj; + } + + /** + * @return An instance of {@link ListIncrementalQueries} for displaying queries that are incrementallly + * maintained by the Rya instance + */ + public ListIncrementalQueries getListIncrementalQueries() { + return listIncrementalQueries; + } + /** * @return An instance of {@link BatchUpdatePCJ} that is connect to a Rya storage * if the Rya instance supports PCJ indexing. 
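For reference, a minimal caller-side sketch of how these new RyaClient hooks fit together (not part of this patch; the AccumuloConnectionDetails, Accumulo Connector, and SPARQL string are assumed to already exist, and the instance, topic, and broker values below are placeholders):

    // The existing factory now wires in the periodic commands alongside the old ones.
    RyaClient client = AccumuloRyaClientFactory.build(connectionDetails, accumuloConnector);

    // Register a Periodic PCJ; the topic and brokers point at the Periodic Notification service's Kafka.
    String queryId = client.getCreatePeriodicPCJ()
            .createPeriodicPCJ("myRyaInstance", sparql, "notificationTopic", "broker1:9092");

    // List everything the Fluo app is incrementally maintaining, then tear the periodic query back down.
    System.out.println(client.getListIncrementalQueries().listIncrementalQueries("myRyaInstance"));
    client.getDeletePeriodicPCJ().deletePeriodicPCJ("myRyaInstance", queryId, "notificationTopic", "broker1:9092");
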
diff --git a/extras/indexing/pom.xml b/extras/indexing/pom.xml index 7961b9f8f..16a205f6f 100644 --- a/extras/indexing/pom.xml +++ b/extras/indexing/pom.xml @@ -81,7 +81,10 @@ org.apache.rya rya.pcj.fluo.api - + + org.apache.rya + rya.periodic.service.api + org.openrdf.sesame diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePeriodicPCJ.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePeriodicPCJ.java new file mode 100644 index 000000000..26a25daf7 --- /dev/null +++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePeriodicPCJ.java @@ -0,0 +1,145 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.api.client.accumulo; + +import static java.util.Objects.requireNonNull; + +import java.util.Properties; + +import org.apache.accumulo.core.client.Connector; +import org.apache.fluo.api.client.FluoClient; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.rya.api.client.CreatePeriodicPCJ; +import org.apache.rya.api.client.GetInstanceDetails; +import org.apache.rya.api.client.InstanceDoesNotExistException; +import org.apache.rya.api.client.RyaClientException; +import org.apache.rya.api.instance.RyaDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.FluoDetails; +import org.apache.rya.api.persist.RyaDAOException; +import org.apache.rya.indexing.pcj.fluo.api.CreatePeriodicQuery; +import org.apache.rya.indexing.pcj.fluo.api.CreatePeriodicQuery.PeriodicQueryCreationException; +import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException; +import org.apache.rya.indexing.pcj.storage.PcjException; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; +import org.apache.rya.periodic.notification.api.PeriodicNotificationClient; +import org.apache.rya.periodic.notification.notification.CommandNotification; +import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient; +import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryEvaluationException; +import org.openrdf.repository.RepositoryException; +import org.openrdf.sail.SailException; + +import com.google.common.base.Optional; + +/** + * Class used by the RyaClient for creating Periodic PCJ. 
+ * + */ +public class AccumuloCreatePeriodicPCJ extends AccumuloCommand implements CreatePeriodicPCJ { + + private final GetInstanceDetails getInstanceDetails; + + /** + * Constructs an instance of {@link AccumuloCreatePeriodicPCJ}. + * + * @param connectionDetails - Details about the values that were used to create the connector to the cluster. (not null) + * @param connector - Provides programatic access to the instance of Accumulo that hosts Rya instance. (not null) + */ + public AccumuloCreatePeriodicPCJ(final AccumuloConnectionDetails connectionDetails, final Connector connector) { + super(connectionDetails, connector); + getInstanceDetails = new AccumuloGetInstanceDetails(connectionDetails, connector); + } + + @Override + public String createPeriodicPCJ(String instanceName, String sparql, String periodicTopic, String bootStrapServers) throws RyaClientException { + requireNonNull(instanceName); + requireNonNull(sparql); + + final Optional ryaDetailsHolder = getInstanceDetails.getDetails(instanceName); + final boolean ryaInstanceExists = ryaDetailsHolder.isPresent(); + if (!ryaInstanceExists) { + throw new InstanceDoesNotExistException(String.format("The '%s' instance of Rya does not exist.", instanceName)); + } + + final PCJIndexDetails pcjIndexDetails = ryaDetailsHolder.get().getPCJIndexDetails(); + final boolean pcjIndexingEnabeld = pcjIndexDetails.isEnabled(); + if (!pcjIndexingEnabeld) { + throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName)); + } + + // If a Fluo application is being used, task it with updating the PCJ. + final Optional fluoDetailsHolder = pcjIndexDetails.getFluoDetails(); + if (fluoDetailsHolder.isPresent()) { + final String fluoAppName = fluoDetailsHolder.get().getUpdateAppName(); + try { + return updateFluoAppAndRegisterWithKafka(instanceName, fluoAppName, sparql, periodicTopic, bootStrapServers); + } catch (RepositoryException | MalformedQueryException | SailException | QueryEvaluationException | PcjException + | RyaDAOException | PeriodicQueryCreationException e) { + throw new RyaClientException("Problem while initializing the Fluo application with the new PCJ.", e); + } catch (UnsupportedQueryException e) { + throw new RyaClientException("The new PCJ could not be initialized because it either contains an unsupported query node " + + "or an invalid ExportStrategy for the given QueryType. Projection queries can be exported to either Rya or Kafka," + + "unless they contain an aggregation, in which case they can only be exported to Kafka. Construct queries can be exported" + + "to Rya and Kafka, and Periodic queries can only be exported to Rya."); + } + } else { + throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName)); + } + } + + + + + private String updateFluoAppAndRegisterWithKafka(final String ryaInstance, final String fluoAppName, String sparql, String periodicTopic, String bootStrapServers) throws RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, PcjException, RyaDAOException, UnsupportedQueryException, PeriodicQueryCreationException { + requireNonNull(sparql); + requireNonNull(periodicTopic); + requireNonNull(bootStrapServers); + + final PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(getConnector(), ryaInstance); + + // Connect to the Fluo application that is updating this instance's PCJs. 
+ final AccumuloConnectionDetails cd = super.getAccumuloConnectionDetails(); + try(final FluoClient fluoClient = new FluoClientFactory().connect( + cd.getUsername(), + new String(cd.getPassword()), + cd.getInstanceName(), + cd.getZookeepers(), + fluoAppName);) { + // Initialize the PCJ within the Fluo application. + final CreatePeriodicQuery periodicPcj = new CreatePeriodicQuery(fluoClient, periodicStorage); + PeriodicNotificationClient periodicClient = new KafkaNotificationRegistrationClient(periodicTopic, createProducer(bootStrapServers)); + return periodicPcj.withRyaIntegration(sparql, periodicClient, getConnector(), ryaInstance).getQueryId(); + } + } + + + private static KafkaProducer createProducer(String bootStrapServers) { + Properties props = new Properties(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, CommandNotificationSerializer.class.getName()); + return new KafkaProducer<>(props); + } + +} diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePeriodicPCJ.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePeriodicPCJ.java new file mode 100644 index 000000000..18e49dca5 --- /dev/null +++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePeriodicPCJ.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.rya.api.client.accumulo; + +import static java.util.Objects.requireNonNull; + +import java.util.Properties; + +import org.apache.accumulo.core.client.Connector; +import org.apache.fluo.api.client.FluoClient; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.rya.api.client.DeletePeriodicPCJ; +import org.apache.rya.api.client.GetInstanceDetails; +import org.apache.rya.api.client.InstanceDoesNotExistException; +import org.apache.rya.api.client.RyaClientException; +import org.apache.rya.api.instance.RyaDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.FluoDetails; +import org.apache.rya.indexing.pcj.fluo.api.DeletePeriodicQuery; +import org.apache.rya.indexing.pcj.fluo.api.DeletePeriodicQuery.QueryDeletionException; +import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; +import org.apache.rya.periodic.notification.api.PeriodicNotificationClient; +import org.apache.rya.periodic.notification.notification.CommandNotification; +import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient; +import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer; +import org.openrdf.query.MalformedQueryException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Optional; + +/** + * Class used by the RyaClient and Rya Shell for deleting Periodic PCJ. + * + */ +public class AccumuloDeletePeriodicPCJ extends AccumuloCommand implements DeletePeriodicPCJ { + private static final Logger log = LoggerFactory.getLogger(AccumuloDeletePCJ.class); + + private final GetInstanceDetails getInstanceDetails; + + /** + * Constructs an instance of {@link AccumuloDeletePeriodicPCJ}. + * + * @param connectionDetails - Details about the values that were used to create the connector to the cluster. (not null) + * @param connector - Provides programmatic access to the instance of Accumulo that hosts Rya instance. (not null) + */ + public AccumuloDeletePeriodicPCJ(final AccumuloConnectionDetails connectionDetails, final Connector connector) { + super(connectionDetails, connector); + getInstanceDetails = new AccumuloGetInstanceDetails(connectionDetails, connector); + } + + @Override + public void deletePeriodicPCJ(final String instanceName, final String pcjId, String topic, String brokers) throws InstanceDoesNotExistException, RyaClientException { + requireNonNull(instanceName); + requireNonNull(pcjId); + + final Optional originalDetails = getInstanceDetails.getDetails(instanceName); + final boolean ryaInstanceExists = originalDetails.isPresent(); + if(!ryaInstanceExists) { + throw new InstanceDoesNotExistException(String.format("The '%s' instance of Rya does not exist.", instanceName)); + } + + final boolean pcjIndexingEnabled = originalDetails.get().getPCJIndexDetails().isEnabled(); + if(!pcjIndexingEnabled) { + throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName)); + } + + // If the PCJ was being maintained by a Fluo application, then stop that process. 
+ final PCJIndexDetails pcjIndexDetails = originalDetails.get().getPCJIndexDetails(); + final Optional fluoDetailsHolder = pcjIndexDetails.getFluoDetails(); + + if (fluoDetailsHolder.isPresent()) { + final String fluoAppName = pcjIndexDetails.getFluoDetails().get().getUpdateAppName(); + try { + stopUpdatingPCJ(instanceName, fluoAppName, pcjId, topic, brokers); + } catch (MalformedQueryException | UnsupportedQueryException | QueryDeletionException e) { + throw new RyaClientException(String.format("Unable to delete Periodic Query with id: %s", pcjId), e); + } + } else { + log.error(String.format("Could not stop the Fluo application from updating the PCJ because the Fluo Details are " + + "missing for the Rya instance named '%s'.", instanceName)); + } + + } + + + private void stopUpdatingPCJ(final String ryaInstance, final String fluoAppName, final String pcjId, final String topic, final String brokers) throws UnsupportedQueryException, MalformedQueryException, QueryDeletionException { + requireNonNull(fluoAppName); + requireNonNull(pcjId); + + // Connect to the Fluo application that is updating this instance's PCJs. + final AccumuloConnectionDetails cd = super.getAccumuloConnectionDetails(); + try (final FluoClient fluoClient = new FluoClientFactory().connect(cd.getUsername(), new String(cd.getPassword()), + cd.getInstanceName(), cd.getZookeepers(), fluoAppName)) { + // Delete the PCJ from the Fluo App. + PeriodicQueryResultStorage periodic = new AccumuloPeriodicQueryResultStorage(getConnector(), ryaInstance); + DeletePeriodicQuery deletePeriodic = new DeletePeriodicQuery(fluoClient, periodic); + deletePeriodic.deletePeriodicQuery(pcjId, getPeriodicNotificationClient(topic, brokers)); + } + } + + + private static PeriodicNotificationClient getPeriodicNotificationClient(String topic, String brokers) throws MalformedQueryException { + return new KafkaNotificationRegistrationClient(topic, createProducer(brokers)); + } + + private static KafkaProducer createProducer(String brokers) { + Properties props = new Properties(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, CommandNotificationSerializer.class.getName()); + return new KafkaProducer<>(props); + } + +} diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloListIncrementalQueries.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloListIncrementalQueries.java new file mode 100644 index 000000000..51e7d6a1f --- /dev/null +++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloListIncrementalQueries.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.api.client.accumulo; + +import static java.util.Objects.requireNonNull; + +import java.util.List; + +import org.apache.accumulo.core.client.Connector; +import org.apache.fluo.api.client.FluoClient; +import org.apache.rya.api.client.GetInstanceDetails; +import org.apache.rya.api.client.InstanceDoesNotExistException; +import org.apache.rya.api.client.ListIncrementalQueries; +import org.apache.rya.api.client.RyaClientException; +import org.apache.rya.api.instance.RyaDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails; +import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.FluoDetails; +import org.apache.rya.indexing.pcj.fluo.api.ListFluoQueries; + +import com.google.common.base.Joiner; +import com.google.common.base.Optional; + +import edu.umd.cs.findbugs.annotations.DefaultAnnotation; +import edu.umd.cs.findbugs.annotations.NonNull; + +@DefaultAnnotation(NonNull.class) +public class AccumuloListIncrementalQueries extends AccumuloCommand implements ListIncrementalQueries { + + private final GetInstanceDetails getInstanceDetails; + + public AccumuloListIncrementalQueries(final AccumuloConnectionDetails connectionDetails, final Connector connector) { + super(connectionDetails, connector); + getInstanceDetails = new AccumuloGetInstanceDetails(connectionDetails, connector); + } + + @Override + public String listIncrementalQueries(String instanceName) throws RyaClientException { + + requireNonNull(instanceName); + + final Optional ryaDetailsHolder = getInstanceDetails.getDetails(instanceName); + final boolean ryaInstanceExists = ryaDetailsHolder.isPresent(); + if (!ryaInstanceExists) { + throw new InstanceDoesNotExistException(String.format("The '%s' instance of Rya does not exist.", instanceName)); + } + + final PCJIndexDetails pcjIndexDetails = ryaDetailsHolder.get().getPCJIndexDetails(); + final boolean pcjIndexingEnabeld = pcjIndexDetails.isEnabled(); + if (!pcjIndexingEnabeld) { + throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName)); + } + + // If a Fluo application is being used, task it with updating the PCJ. + final Optional fluoDetailsHolder = pcjIndexDetails.getFluoDetails(); + if (fluoDetailsHolder.isPresent()) { + final String fluoAppName = fluoDetailsHolder.get().getUpdateAppName(); + try { + return getFluoQueryString(instanceName, fluoAppName); + } catch (Exception e) { + throw new RyaClientException("Problem while creating Fluo Query Strings.", e); + } + } else { + throw new RyaClientException(String.format("The '%s' instance of Rya does not have Fluo incremental updating enabled.", instanceName)); + } + } + + + private String getFluoQueryString(final String ryaInstance, final String fluoAppName) throws Exception { + + // Connect to the Fluo application that is updating this instance's PCJs. + final AccumuloConnectionDetails cd = super.getAccumuloConnectionDetails(); + try(final FluoClient fluoClient = new FluoClientFactory().connect( + cd.getUsername(), + new String(cd.getPassword()), + cd.getInstanceName(), + cd.getZookeepers(), + fluoAppName);) { + // Initialize the PCJ within the Fluo application. 
+ ListFluoQueries listQueries = new ListFluoQueries(); + List queries = listQueries.listFluoQueries(fluoClient); + return Joiner.on("\n").join(queries); + } + } + +} diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloRyaClientFactory.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloRyaClientFactory.java index 5ee02f913..d9bf64484 100644 --- a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloRyaClientFactory.java +++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloRyaClientFactory.java @@ -52,6 +52,9 @@ public static RyaClient build( new AccumuloInstall(connectionDetails, connector), new AccumuloCreatePCJ(connectionDetails, connector), new AccumuloDeletePCJ(connectionDetails, connector), + new AccumuloCreatePeriodicPCJ(connectionDetails, connector), + new AccumuloDeletePeriodicPCJ(connectionDetails, connector), + new AccumuloListIncrementalQueries(connectionDetails, connector), new AccumuloBatchUpdatePCJ(connectionDetails, connector), new AccumuloGetInstanceDetails(connectionDetails, connector), new AccumuloInstanceExists(connectionDetails, connector), diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/pom.xml b/extras/rya.pcj.fluo/pcj.fluo.api/pom.xml index 758c481ad..16d33b2ca 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.api/pom.xml +++ b/extras/rya.pcj.fluo/pcj.fluo.api/pom.xml @@ -41,6 +41,10 @@ under the License. org.apache.rya rya.pcj.fluo.app + + org.apache.rya + rya.periodic.service.api + org.apache.rya rya.sail diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java index 501f1f54e..a988bc74d 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java +++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java @@ -38,6 +38,7 @@ import org.apache.rya.accumulo.AccumuloRdfConfiguration; import org.apache.rya.accumulo.query.AccumuloRyaQueryEngine; import org.apache.rya.api.client.CreatePCJ.ExportStrategy; +import org.apache.rya.api.client.CreatePCJ.QueryType; import org.apache.rya.api.domain.RyaStatement; import org.apache.rya.api.domain.RyaType; import org.apache.rya.api.domain.RyaURI; @@ -135,6 +136,7 @@ public CreateFluoPcj(final int spInsertBatchSize, final int joinBatchSize) { * according to the Kafka {@link ExportStrategy}. * * @param sparql - sparql query String to be registered with Fluo + * @param strategies - ExportStrategies used to specify how final results will be handled * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null) * @return The metadata that was written to the Fluo application for the PCJ. * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed. 
@@ -218,7 +220,17 @@ private FluoQuery makeFluoQuery(String sparql, String pcjId, Set .setSparql(sparql) .setJoinBatchSize(joinBatchSize); - return builder.build(); + FluoQuery query = builder.build(); + + if(query.getQueryType() == QueryType.PERIODIC && !Sets.newHashSet(ExportStrategy.PERIODIC).containsAll(strategies)) { + throw new UnsupportedQueryException("Periodic Queries must only utilize the PeriodicExport or the NoOpExport ExportStrategy."); + } + + if(query.getQueryType() != QueryType.PERIODIC && strategies.contains(ExportStrategy.PERIODIC)) { + throw new UnsupportedQueryException("Only Periodic Queries can utilize the PeriodicExport ExportStrategy."); + } + + return query; } private void writeFluoQuery(FluoClient fluo, FluoQuery fluoQuery, String pcjId) { @@ -283,13 +295,13 @@ public String withRyaIntegration( * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null) * @param accumulo - Accumulo connector for connecting with Accumulo * @param ryaInstance - name of Rya instance to connect to - * @return The Fluo application's Query ID of the query that was created. + * @return FluoQuery containing the metadata for the newly registered SPARQL query * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed. * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}. * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}. * @throws UnsupportedQueryException */ - public String withRyaIntegration( + public FluoQuery withRyaIntegration( final String pcjId, final String sparql, final Set strategies, @@ -308,7 +320,7 @@ public String withRyaIntegration( //import results already ingested into Rya that match query importHistoricResultsIntoFluo(fluo, fluoQuery, accumulo, ryaInstance); // return queryId to the caller for later monitoring from the export. - return fluoQuery.getQueryMetadata().getNodeId(); + return fluoQuery; } /** @@ -326,13 +338,13 @@ public String withRyaIntegration( * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null) * @param accumulo - Accumuo connector for connecting to Accumulo * @param ryaInstance - name of Rya instance to connect to - * @return The Fluo application's Query ID of the query that was created. + * @return FluoQuery containing the metadata for the newly registered SPARQL query * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed. * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}. * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}. * @throws UnsupportedQueryException */ - public String withRyaIntegration( + public FluoQuery withRyaIntegration( final String pcjId, final PrecomputedJoinStorage pcjStorage, final FluoClient fluo, diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreatePeriodicQuery.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreatePeriodicQuery.java new file mode 100644 index 000000000..24adde993 --- /dev/null +++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreatePeriodicQuery.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.indexing.pcj.fluo.api; + +import java.util.Optional; + +import org.apache.accumulo.core.client.Connector; +import org.apache.fluo.api.client.FluoClient; +import org.apache.rya.api.client.CreatePCJ.ExportStrategy; +import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery; +import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryNode; +import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException; +import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils; +import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException; +import org.apache.rya.periodic.notification.api.PeriodicNotificationClient; +import org.apache.rya.periodic.notification.notification.PeriodicNotification; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.algebra.evaluation.function.Function; + +import com.google.common.collect.Sets; + + +/** + * Object that creates a Periodic Query. A Periodic Query is any query + * requesting periodic updates about events that occurred within a given + * window of time of this instant. This is also known as a rolling window + * query. Period Queries can be expressed using SPARQL by including the + * {@link Function} indicated by the URI {@link PeriodicQueryUtil#PeriodicQueryURI} + * in the query. The user must provide this Function with the following arguments: + * the temporal variable in the query that will be filtered on, the window of time + * that events must occur within, the period at which the user wants to receive updates, + * and the time unit. The following query requests all observations that occurred + * within the last minute and requests updates every 15 seconds. It also performs + * a count on those observations. + *

+ *

+ *  prefix function: http://org.apache.rya/function#
+ *  prefix time: http://www.w3.org/2006/time#
+ *  select (count(?obs) as ?total) where {
+ *    Filter(function:periodic(?time, 1, .25, time:minutes))
+ *    ?obs uri:hasTime ?time.
+ *    ?obs uri:hasId ?id }
+ * 
+ *

+ * This class is responsible for taking a Periodic Query expressed as a SPARQL query + * and adding to Fluo and Kafka so that it can be processed by the {@link PeriodicNotificationApplication}. + */ +public class CreatePeriodicQuery { + + private FluoClient fluoClient; + private PeriodicQueryResultStorage periodicStorage; + + + /** + * Constructs an instance of CreatePeriodicQuery for creating periodic queries. An instance + * of CreatePeriodicQuery that is created using this constructor will not publish new PeriodicNotifications + * to Kafka. + * + * @param fluoClient - Fluo client for interacting with Fluo + * @param periodicStorage - PeriodicQueryResultStorage storing periodic query results + */ + public CreatePeriodicQuery(FluoClient fluoClient, PeriodicQueryResultStorage periodicStorage) { + this.fluoClient = fluoClient; + this.periodicStorage = periodicStorage; + } + + + /** + * Creates a Periodic Query by adding the query to Fluo and using the resulting + * Fluo id to create a {@link PeriodicQueryResultStorage} table. + * + * @param sparql - sparql query registered to Fluo whose results are stored in PeriodicQueryResultStorage table + * @return FluoQuery indicating the metadata of the registered SPARQL query + */ + public FluoQuery createPeriodicQuery(String sparql) throws PeriodicQueryCreationException { + try { + Optional optNode = PeriodicQueryUtil.getPeriodicNode(sparql); + if(optNode.isPresent()) { + String pcjId = FluoQueryUtils.createNewPcjId(); + + //register query with Fluo + CreateFluoPcj createPcj = new CreateFluoPcj(); + FluoQuery fluoQuery = createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluoClient); + + //register query with PeriodicResultStorage table + periodicStorage.createPeriodicQuery(pcjId, sparql); + + return fluoQuery; + } else { + throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter."); + } + } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) { + throw new PeriodicQueryCreationException(e); + } + } + + + /** + * Creates a Periodic Query by adding the query to Fluo and using the resulting + * Fluo id to create a {@link PeriodicQueryResultStorage} table. Additionally, + * the associated PeriodicNotification is registered with the Periodic Query Service. 
+ * + * @param sparql - sparql query registered to Fluo whose results are stored in PeriodicQueryResultStorage table + * @param notificationClient - {@link PeriodicNotificationClient} for registering new PeriodicNotifications + * @return FluoQuery indicating the metadata of the registered SPARQL query + */ + public FluoQuery createPeriodicQuery(String sparql, PeriodicNotificationClient notificationClient) throws PeriodicQueryCreationException { + try { + Optional optNode = PeriodicQueryUtil.getPeriodicNode(sparql); + if(optNode.isPresent()) { + PeriodicQueryNode periodicNode = optNode.get(); + String pcjId = FluoQueryUtils.createNewPcjId(); + + //register query with Fluo + CreateFluoPcj createPcj = new CreateFluoPcj(); + FluoQuery fluoQuery = createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluoClient); + + //register query with PeriodicResultStorage table + periodicStorage.createPeriodicQuery(pcjId, sparql); + //create notification + PeriodicNotification notification = PeriodicNotification.builder().id(pcjId).period(periodicNode.getPeriod()) + .timeUnit(periodicNode.getUnit()).build(); + //register notification with periodic notification app + notificationClient.addNotification(notification); + + return fluoQuery; + } else { + throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter."); + } + } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) { + throw new PeriodicQueryCreationException(e); + } + } + + + /** + * Creates a Periodic Query by adding the query to Fluo and using the resulting + * Fluo id to create a {@link PeriodicQueryResultStorage} table. + * @param sparql - sparql query registered to Fluo whose results are stored in PeriodicQueryResultStorage table + * @param notificationClient - {@link PeriodicNotificationClient} for registering new PeriodicNotifications + * @param conn - Accumulo connector for connecting to the Rya instance + * @param ryaInstance - name of the Accumulo back Rya instance + * @return FluoQuery indicating the metadata of the registered SPARQL query + */ + public FluoQuery withRyaIntegration(String sparql, PeriodicNotificationClient notificationClient, Connector conn, String ryaInstance) + throws PeriodicQueryCreationException { + try { + Optional optNode = PeriodicQueryUtil.getPeriodicNode(sparql); + if (optNode.isPresent()) { + PeriodicQueryNode periodicNode = optNode.get(); + String pcjId = FluoQueryUtils.createNewPcjId(); + + // register query with Fluo + CreateFluoPcj createPcj = new CreateFluoPcj(); + FluoQuery fluoQuery = createPcj.withRyaIntegration(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), + fluoClient, conn, ryaInstance); + + // register query with PeriodicResultStorage table + periodicStorage.createPeriodicQuery(pcjId, sparql); + // create notification + PeriodicNotification notification = PeriodicNotification.builder().id(pcjId).period(periodicNode.getPeriod()) + .timeUnit(periodicNode.getUnit()).build(); + // register notification with periodic notification app + notificationClient.addNotification(notification); + + return fluoQuery; + } else { + throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter."); + } + } catch (Exception e) { + throw new PeriodicQueryCreationException(e); + } + } + + /** + * This Exception gets thrown whenever there is an issue creating a PeriodicQuery. 
+ * + */ + public static class PeriodicQueryCreationException extends Exception { + + private static final long serialVersionUID = 1L; + + public PeriodicQueryCreationException(Exception e) { + super(e); + } + + public PeriodicQueryCreationException(String message, Exception e) { + super(message, e); + } + + public PeriodicQueryCreationException(String message) { + super(message); + } + + } + +} diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeletePeriodicQuery.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeletePeriodicQuery.java new file mode 100644 index 000000000..4ff88da3f --- /dev/null +++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeletePeriodicQuery.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.indexing.pcj.fluo.api; + +import org.apache.fluo.api.client.FluoClient; +import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException; +import org.apache.rya.periodic.notification.api.PeriodicNotificationClient; +import org.apache.rya.periodic.notification.notification.BasicNotification; + +import com.google.common.base.Preconditions; + +public class DeletePeriodicQuery { + + private FluoClient fluo; + private PeriodicQueryResultStorage periodicStorage; + + public DeletePeriodicQuery(FluoClient fluo, PeriodicQueryResultStorage periodicStorage) { + this.fluo = fluo; + this.periodicStorage = periodicStorage; + } + + /** + * Deletes the Periodic Query with the indicated pcjId from Fluo and {@link PeriodicQueryResultStorage}. + * @param pcjId - Id of the Periodic Query to be deleted + */ + public void deletePeriodicQuery(String pcjId) throws QueryDeletionException { + + Preconditions.checkNotNull(pcjId); + + DeleteFluoPcj deletePcj = new DeleteFluoPcj(1000); + try { + deletePcj.deletePcj(fluo, pcjId); + periodicStorage.deletePeriodicQuery(pcjId); + } catch (UnsupportedQueryException | PeriodicQueryStorageException e) { + throw new QueryDeletionException(String.format("Unable to delete the Periodic Query with Id: %s", pcjId), e); + } + + } + + /** + * Deletes the Periodic Query with the indicated pcjId from Fluo and {@link PeriodicQueryResultStorage}. In + * addition, this method also informs the Periodic Notification Service to stop generating PeriodicNotifications + * associated with the Periodic Query. 
+ * + * @param pcjId - Id of the Periodic Query to be deleted + * @param periodicClient - Client used to inform the Periodic Notification Service to stop generating notifications + * @throws QueryDeletionException + */ + public void deletePeriodicQuery(String pcjId, PeriodicNotificationClient periodicClient) throws QueryDeletionException { + + Preconditions.checkNotNull(periodicClient); + + deletePeriodicQuery(pcjId); + periodicClient.deleteNotification(new BasicNotification(pcjId)); + } + + /** + * This Exception is thrown when a problem is encountered while deleting a + * query from the Fluo Application or the underlying storage layer. + */ + public static class QueryDeletionException extends Exception { + + private static final long serialVersionUID = 1L; + + public QueryDeletionException(String message) { + super(message); + } + + public QueryDeletionException(String message, Exception e) { + super(message, e); + } + } + +} diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueries.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueries.java new file mode 100644 index 000000000..8f5bbfeaa --- /dev/null +++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueries.java @@ -0,0 +1,149 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.indexing.pcj.fluo.api; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.apache.fluo.api.client.FluoClient; +import org.apache.fluo.api.client.Snapshot; +import org.apache.rya.api.client.CreatePCJ.ExportStrategy; +import org.apache.rya.api.client.CreatePCJ.QueryType; +import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO; +import org.apache.rya.indexing.pcj.fluo.app.query.QueryMetadata; +import org.openrdf.query.parser.ParsedQuery; +import org.openrdf.query.parser.sparql.SPARQLParser; +import org.openrdf.queryrender.sparql.SPARQLQueryRenderer; + +import com.google.common.base.Preconditions; + +/** + * Class for retrieving a List containing a String representation of each query maintained by Fluo. + * + */ +public class ListFluoQueries { + + private static final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO(); + + /** + * Retrieve a list of String representations of each query maintained by Fluo + * + * @param fluo - FluoClient for interacting with Fluo + * @return - List of String representations of queries maintained by Fluo.
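The corresponding teardown uses DeletePeriodicQuery. Continuing the hypothetical fluo, storage, notifications, and pcjId variables from the creation sketch above (this mirrors what the CreateDeletePeriodicPCJ test later in the patch does):

// Removes the query from Fluo and from the PeriodicQueryResultStorage table, and tells
// the Periodic Notification Service to stop generating notifications for this query.
DeletePeriodicQuery deletePeriodicQuery = new DeletePeriodicQuery(fluo, storage);
deletePeriodicQuery.deletePeriodicQuery(pcjId, notifications);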
+ * @throws Exception + */ + public List listFluoQueries(FluoClient fluo) throws Exception { + + List queryStrings = new ArrayList<>(); + Snapshot sx = fluo.newSnapshot(); + + List ids = new ListQueryIds().listQueryIds(fluo); + for (String id : ids) { + queryStrings.add(extractString(dao.readQueryMetadata(sx, id))); + } + + return queryStrings; + } + + private static String extractString(QueryMetadata metadata) throws Exception { + FluoQueryStringBuilder builder = new FluoQueryStringBuilder(); + return builder.setQueryId(metadata.getNodeId()).setQueryType(metadata.getQueryType()) + .setExportStrategies(metadata.getExportStrategies()).setQuery(metadata.getSparql()).build(); + } + + private static String getPrettyPrintSparql(String sparql, int indent) throws Exception { + SPARQLParser parser = new SPARQLParser(); + ParsedQuery pq = parser.parseQuery(sparql, null); + SPARQLQueryRenderer render = new SPARQLQueryRenderer(); + String renderedQuery = render.render(pq); + + //remove extra quotes generated by query renderer + String[] splitRender = renderedQuery.split("\"\"\""); + StringBuilder builder = new StringBuilder(); + for(String s: splitRender) { + builder.append(s).append("\""); + } + builder.replace(builder.length() - 1, builder.length(), ""); + + //add indent to all lines following newline char + String[] newLineRender = builder.toString().split("\n"); + builder = new StringBuilder(); + String prefix = getVariableIndent(indent); + for(int i = 0; i < newLineRender.length; i++) { + if(i != 0) { + builder.append(prefix); + } + builder.append(newLineRender[i]).append("\n"); + } + + return builder.toString(); + } + + private static String getVariableIndent(int len) { + return new String(new char[len]).replace('\0', ' '); + } + + public static class FluoQueryStringBuilder { + + private String queryId; + private String sparql; + private QueryType queryType; + private Set strategies; + + public FluoQueryStringBuilder setQueryId(String queryId) { + this.queryId = Preconditions.checkNotNull(queryId); + return this; + } + + public FluoQueryStringBuilder setQuery(String query) { + this.sparql = Preconditions.checkNotNull(query); + return this; + } + + public FluoQueryStringBuilder setExportStrategies(Set strategies) { + this.strategies = Preconditions.checkNotNull(strategies); + return this; + } + + public FluoQueryStringBuilder setQueryType(QueryType queryType) { + this.queryType = Preconditions.checkNotNull(queryType); + return this; + } + + public String build() throws Exception { + + int valueAlign = 20; + String sparqlHeader = "SPARQL: "; + String idHeader = "QUERY ID: "; + String typeHeader = "QUERY TYPE: "; + String strategiesHeader = "EXPORT STRATEGIES: "; + + StringBuilder builder = new StringBuilder(); + builder.append(idHeader).append(getVariableIndent(valueAlign - idHeader.length())).append(queryId).append("\n") + .append(typeHeader).append(getVariableIndent(valueAlign - typeHeader.length())).append(queryType).append("\n") + .append(strategiesHeader).append(getVariableIndent(valueAlign - strategiesHeader.length())).append(strategies).append("\n") + .append(sparqlHeader).append(getVariableIndent(valueAlign - sparqlHeader.length())).append(getPrettyPrintSparql(sparql, valueAlign)).append("\n"); + + return builder.toString(); + } + + } + +} diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java index 
c090d373b..5405837b5 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java @@ -39,6 +39,7 @@ public class IncrementalUpdateConstants { public static final String CONSTRUCT_PREFIX = "CONSTRUCT"; public static final String PERIODIC_QUERY_PREFIX = "PERIODIC_QUERY"; + //binding name reserved for periodic bin id for periodic query results public static final String PERIODIC_BIN_ID = PeriodicQueryResultStorage.PeriodicBinId; public static final String URI_TYPE = "http://www.w3.org/2001/XMLSchema#anyURI"; diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java index 62f1271f7..2cb7eff1d 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java @@ -30,7 +30,6 @@ import org.apache.rya.api.client.CreatePCJ.QueryType; import org.apache.rya.api.domain.RyaStatement; import org.apache.rya.api.domain.RyaSubGraph; -import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter; import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException; import org.apache.rya.indexing.pcj.fluo.app.export.kafka.RyaSubGraphKafkaSerDe; import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery; @@ -95,16 +94,21 @@ public void export(QueryType type, Set strategies, String queryI * @throws ResultExportException */ private void exportBindingSet(Map exporters, Set strategies, String pcjId, Bytes data) throws ResultExportException { + VisibilityBindingSet bs; try { - VisibilityBindingSet bs = BS_SERDE.deserialize(data); + bs = BS_SERDE.deserialize(data); simplifyVisibilities(bs); + } catch (Exception e) { + throw new ResultExportException("Unable to deserialize the given BindingSet.", e); + } + try{ for(ExportStrategy strategy: strategies) { IncrementalBindingSetExporter exporter = (IncrementalBindingSetExporter) exporters.get(strategy); exporter.export(pcjId, bs); } } catch (Exception e) { - throw new ResultExportException("Unable to deserialize the provided BindingSet", e); + throw new ResultExportException("Unable to export the given BindingSet " + bs + " with the given set of ExportStrategies " + strategies, e); } } @@ -125,9 +129,14 @@ private void exportSubGraph(Map expor throw new ResultExportException("Undable to deserialize provided RyaSubgraph", e); } - for(ExportStrategy strategy: strategies) { - IncrementalRyaSubGraphExporter exporter = (IncrementalRyaSubGraphExporter) exporters.get(strategy); - exporter.export(pcjId, subGraph); + try { + for (ExportStrategy strategy : strategies) { + IncrementalRyaSubGraphExporter exporter = (IncrementalRyaSubGraphExporter) exporters.get(strategy); + exporter.export(pcjId, subGraph); + } + } catch (Exception e) { + throw new ResultExportException( + "Unable to export the given subgraph " + subGraph + " using all of the ExportStrategies " + strategies); } } @@ -195,8 +204,6 @@ public Builder addIncrementalResultExporter(IncrementalResultExporter exporter) * @return - ExporterManager for managing IncrementalResultExporters and exporting results */ public ExporterManager 
build() { - //adds NoOpExporter in the event that users does not want to Export results - addIncrementalResultExporter(new NoOpExporter()); return new ExporterManager(exporters); } diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java deleted file mode 100644 index ab7f2eda0..000000000 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.rya.indexing.pcj.fluo.app.export; - -import java.util.Set; - -import org.apache.rya.api.client.CreatePCJ.ExportStrategy; -import org.apache.rya.api.client.CreatePCJ.QueryType; -import org.apache.rya.api.domain.RyaSubGraph; -import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet; - -import com.google.common.collect.Sets; - -/** - * This class is a NoOpExporter that can be specified if a user does not - * want their results exported from Fluo. 
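With the NoOpExporter removed and no longer injected by the builder, a caller that does not want results exported simply registers the query with an empty strategy set, as the updated CreateDeleteIT later in this patch does. A hypothetical one-liner (ryaClient, ryaInstanceName, and sparql are assumed to exist):

// An empty set of ExportStrategies means results are maintained in Fluo but never exported.
String pcjId = ryaClient.getCreatePCJ().createPCJ(ryaInstanceName, sparql, Sets.newHashSet());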
- * - */ -public class NoOpExporter implements IncrementalBindingSetExporter, IncrementalRyaSubGraphExporter { - - @Override - public Set getQueryTypes() { - return Sets.newHashSet(QueryType.CONSTRUCT, QueryType.PROJECTION); - } - - @Override - public ExportStrategy getExportStrategy() { - return ExportStrategy.NO_OP_EXPORT; - } - - @Override - public void close() throws Exception { - } - - @Override - public void export(String constructID, RyaSubGraph subgraph) throws ResultExportException { - } - - @Override - public void export(String queryId, VisibilityBindingSet result) throws ResultExportException { - } - -} diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java index 4550a502e..3687c9fef 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java @@ -26,7 +26,6 @@ import com.google.common.base.Preconditions; - public class KafkaBindingSetExporterParameters extends KafkaExportParameterBase { public static final String CONF_USE_KAFKA_BINDING_SET_EXPORTER = "pcj.fluo.export.kafka.bindingset.enabled"; diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java index 604462b2b..5a8f01cc4 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java @@ -52,7 +52,7 @@ public Set getQueryTypes() { @Override public ExportStrategy getExportStrategy() { - return ExportStrategy.RYA; + return ExportStrategy.PERIODIC; } @Override diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java index ba7beee99..e07c514d6 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java @@ -19,6 +19,7 @@ package org.apache.rya.indexing.pcj.fluo.app.observers; import static org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.NODEID_BS_DELIM; +import static org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns.QUERY_BINDING_SET; import org.apache.fluo.api.client.TransactionBase; import org.apache.fluo.api.data.Bytes; @@ -35,7 +36,6 @@ import org.apache.rya.indexing.pcj.fluo.app.export.rya.PeriodicBindingSetExporterFactory; import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaBindingSetExporterFactory; import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaSubGraphExporterFactory; -import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns; import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO; import 
org.apache.rya.indexing.pcj.fluo.app.query.QueryMetadata; @@ -66,7 +66,7 @@ public class QueryResultObserver extends AbstractObserver { @Override public ObservedColumn getObservedColumn() { - return new ObservedColumn(FluoQueryColumns.QUERY_BINDING_SET, NotificationType.STRONG); + return new ObservedColumn(QUERY_BINDING_SET, NotificationType.STRONG); } /** diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java index 17ab14f00..a1c7c0036 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java @@ -92,11 +92,7 @@ private FluoQuery( this.statementPatternMetadata = requireNonNull(statementPatternMetadata); this.filterMetadata = requireNonNull(filterMetadata); this.joinMetadata = requireNonNull(joinMetadata); - if(constructMetadata.isPresent()) { - this.type = QueryType.CONSTRUCT; - } else { - this.type = QueryType.PROJECTION; - } + this.type = queryMetadata.getQueryType(); } /** diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java index 8569a48d7..6ca0e8d55 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java @@ -24,6 +24,7 @@ import java.util.List; import org.apache.fluo.api.data.Column; +import org.apache.rya.api.client.CreatePCJ.QueryType; import org.apache.rya.indexing.pcj.fluo.app.AggregationResultUpdater.AggregationState; import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet; diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java index d5d9fe781..1cf2825ff 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java +++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java @@ -113,8 +113,10 @@ private QueryMetadata.Builder readQueryMetadataBuilder(final SnapshotBase sx, fi final String[] exportStrategies = values.get(FluoQueryColumns.QUERY_EXPORT_STRATEGIES).split(IncrementalUpdateConstants.VAR_DELIM); Set strategies = new HashSet<>(); - for(String strategy: exportStrategies) { - strategies.add(ExportStrategy.valueOf(strategy)); + for (String strategy : exportStrategies) { + if (!strategy.isEmpty()) { + strategies.add(ExportStrategy.valueOf(strategy)); + } } return QueryMetadata.builder(nodeId) diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueriesIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueriesIT.java new file mode 100644 index 000000000..97247046c --- /dev/null +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListFluoQueriesIT.java @@ -0,0 +1,96 @@ +/* + * Licensed 
to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.indexing.pcj.fluo.api; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.fluo.api.client.FluoClient; +import org.apache.fluo.api.client.FluoFactory; +import org.apache.fluo.api.client.Transaction; +import org.apache.rya.api.client.CreatePCJ.ExportStrategy; +import org.apache.rya.api.client.CreatePCJ.QueryType; +import org.apache.rya.indexing.pcj.fluo.api.ListFluoQueries.FluoQueryStringBuilder; +import org.apache.rya.indexing.pcj.fluo.app.NodeType; +import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO; +import org.apache.rya.indexing.pcj.fluo.app.query.QueryMetadata; +import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder; +import org.apache.rya.pcj.fluo.test.base.RyaExportITBase; +import org.junit.Assert; +import org.junit.Test; + +import com.google.common.collect.Sets; + + +public class ListFluoQueriesIT extends RyaExportITBase { + + @Test + public void queryMetadataTest() throws Exception { + final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO(); + + String sparql1 = "select ?x ?y ?z where {?x ?y; 'literal1'. ?y ?z }"; + String sparql2 = "select ?x ?y ?z where {{select ?x ?y ?z {?x ?y; ?z. ?y ?z }}}"; + + // Create the object that will be serialized. + String queryId1 = NodeType.generateNewFluoIdForType(NodeType.QUERY); + final QueryMetadata.Builder builder = QueryMetadata.builder(queryId1); + builder.setQueryType(QueryType.PROJECTION); + builder.setVarOrder(new VariableOrder("y;s;d")); + builder.setSparql(sparql1); + builder.setChildNodeId("childNodeId"); + builder.setExportStrategies(new HashSet<>(Arrays.asList(ExportStrategy.KAFKA))); + final QueryMetadata meta1 = builder.build(); + + String queryId2 = NodeType.generateNewFluoIdForType(NodeType.QUERY); + final QueryMetadata.Builder builder2 = QueryMetadata.builder(queryId2); + builder2.setQueryType(QueryType.PROJECTION); + builder2.setVarOrder(new VariableOrder("y;s;d")); + builder2.setSparql(sparql2); + builder2.setChildNodeId("childNodeId"); + builder2.setExportStrategies(new HashSet<>(Arrays.asList(ExportStrategy.RYA))); + final QueryMetadata meta2 = builder2.build(); + + try (FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) { + // Write it to the Fluo table. 
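// For reference, each String returned by ListFluoQueries#listFluoQueries is a block of
// aligned key/value lines produced by FluoQueryStringBuilder, roughly of the following
// shape (values here are illustrative only, not what this test asserts verbatim):
//
//   QUERY ID:           QUERY_<generated id>
//   QUERY TYPE:         PROJECTION
//   EXPORT STRATEGIES:  [KAFKA]
//   SPARQL:             select ?x ?y ?z where { ... }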
+ try (Transaction tx = fluoClient.newTransaction()) { + dao.write(tx, meta1); + dao.write(tx, meta2); + tx.commit(); + } + ListFluoQueries listFluoQueries = new ListFluoQueries(); + List queries = listFluoQueries.listFluoQueries(fluoClient); + + FluoQueryStringBuilder queryBuilder1 = new FluoQueryStringBuilder(); + String expected1 = queryBuilder1.setQueryId(queryId1).setQueryType(QueryType.PROJECTION).setQuery(sparql1) + .setExportStrategies(Sets.newHashSet(ExportStrategy.KAFKA)).build(); + + FluoQueryStringBuilder queryBuilder2 = new FluoQueryStringBuilder(); + String expected2 = queryBuilder2.setQueryId(queryId2).setQueryType(QueryType.PROJECTION).setQuery(sparql2) + .setExportStrategies(Sets.newHashSet(ExportStrategy.RYA)).build(); + + Set expected = new HashSet<>(); + expected.add(expected1); + expected.add(expected2); + + Assert.assertEquals(expected, Sets.newHashSet(queries)); + } + } +} diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java index 47a2f293d..66aa04ba3 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java @@ -90,7 +90,7 @@ public void simpleScanDelete() throws Exception { // Tell the Fluo app to maintain the PCJ. String queryId = new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); List prefixes = Arrays.asList("urn:subject_1", "urn:subject_1", "urn:object", "urn:subject_1", "urn:subject_1"); @@ -130,7 +130,7 @@ public void simpleJoinDelete() throws Exception { // Tell the Fluo app to maintain the PCJ. String queryId = new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); String joinId = ids.get(2); @@ -176,7 +176,7 @@ public void simpleJoinAdd() throws Exception { // Tell the Fluo app to maintain the PCJ. String queryId = new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); String joinId = ids.get(2); @@ -225,7 +225,7 @@ public void joinBatchIntegrationTest() throws Exception { // Tell the Fluo app to maintain the PCJ and sets batch scan size for StatementPatterns to 5 and // batch size of joins to 5. String queryId = new CreateFluoPcj(5, 5).withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); @@ -264,7 +264,7 @@ public void leftJoinBatchIntegrationTest() throws Exception { // Tell the Fluo app to maintain the PCJ and sets batch scan size for StatementPatterns to 5 and // batch size of joins to 5. 
String queryId = new CreateFluoPcj(5, 5).withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); @@ -305,7 +305,7 @@ public void multiJoinBatchIntegrationTest() throws Exception { // Tell the Fluo app to maintain the PCJ and sets batch scan size for StatementPatterns to 5 and // batch size of joins to 5. String queryId = new CreateFluoPcj(5, 5).withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), - getRyaInstanceName()); + getRyaInstanceName()).getQueryId(); List ids = getNodeIdStrings(fluoClient, queryId); diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java index a1d76cb57..27b822241 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java @@ -33,7 +33,6 @@ import org.apache.fluo.api.client.scanner.RowScanner; import org.apache.fluo.api.data.Bytes; import org.apache.fluo.api.data.Span; -import org.apache.rya.api.client.CreatePCJ.ExportStrategy; import org.apache.rya.api.client.RyaClient; import org.apache.rya.api.client.accumulo.AccumuloRyaClientFactory; import org.apache.rya.indexing.pcj.fluo.api.DeleteFluoPcj; @@ -131,7 +130,7 @@ private String loadData(final String sparql, final Collection stateme // Register the PCJ with Rya. final RyaClient ryaClient = AccumuloRyaClientFactory.build(createConnectionDetails(), getAccumuloConnector()); - final String pcjId = ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql, Sets.newHashSet(ExportStrategy.NO_OP_EXPORT)); + final String pcjId = ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql, Sets.newHashSet()); // Write the data to Rya. final SailRepositoryConnection ryaConn = super.getRyaSailRepository().getConnection(); diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeletePeriodicPCJ.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeletePeriodicPCJ.java new file mode 100644 index 000000000..e61104ad9 --- /dev/null +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeletePeriodicPCJ.java @@ -0,0 +1,227 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.rya.indexing.pcj.fluo.integration; + +import static java.util.Objects.requireNonNull; +import static org.junit.Assert.assertEquals; + +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Properties; +import java.util.Set; + +import javax.xml.datatype.DatatypeFactory; + +import org.apache.fluo.api.client.FluoClient; +import org.apache.fluo.api.client.FluoFactory; +import org.apache.fluo.api.client.Snapshot; +import org.apache.fluo.api.client.scanner.ColumnScanner; +import org.apache.fluo.api.client.scanner.RowScanner; +import org.apache.fluo.api.data.Bytes; +import org.apache.fluo.api.data.Span; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.rya.indexing.pcj.fluo.api.CreatePeriodicQuery; +import org.apache.rya.indexing.pcj.fluo.api.DeletePeriodicQuery; +import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils; +import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; +import org.apache.rya.pcj.fluo.test.base.KafkaExportITBase; +import org.apache.rya.periodic.notification.api.PeriodicNotificationClient; +import org.apache.rya.periodic.notification.notification.CommandNotification; +import org.apache.rya.periodic.notification.notification.CommandNotification.Command; +import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient; +import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer; +import org.junit.Test; +import org.openrdf.model.Statement; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.impl.ValueFactoryImpl; + +import com.google.common.collect.Sets; + +public class CreateDeletePeriodicPCJ extends KafkaExportITBase { + + @Test + public void deletePeriodicPCJ() throws Exception { + String query = "prefix function: " // n + + "prefix time: " // n + + "select (count(?obs) as ?total) where {" // n + + "Filter(function:periodic(?time, 2, .5, time:hours)) " // n + + "?obs ?time. " // n + + "?obs ?id }"; // n + + // Create the Statements that will be loaded into Rya. 
+ final ValueFactory vf = new ValueFactoryImpl(); + final DatatypeFactory dtf = DatatypeFactory.newInstance(); + ZonedDateTime time = ZonedDateTime.now(); + + ZonedDateTime zTime1 = time.minusMinutes(30); + String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT); + + ZonedDateTime zTime2 = zTime1.minusMinutes(30); + String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT); + + ZonedDateTime zTime3 = zTime2.minusMinutes(30); + String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT); + + ZonedDateTime zTime4 = zTime3.minusMinutes(30); + String time4 = zTime4.format(DateTimeFormatter.ISO_INSTANT); + + final Collection statements = Sets.newHashSet( + vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), + vf.createLiteral(dtf.newXMLGregorianCalendar(time1))), + vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")), + vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), + vf.createLiteral(dtf.newXMLGregorianCalendar(time2))), + vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")), + vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), + vf.createLiteral(dtf.newXMLGregorianCalendar(time3))), + vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")), + vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), + vf.createLiteral(dtf.newXMLGregorianCalendar(time4))), + vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasId"), vf.createLiteral("id_4"))); + + runTest(query, statements, 29); + + } + + + + private void runTest(String query, Collection statements, int expectedEntries) throws Exception { + try (FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) { + + String topic = "notification_topic"; + PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(super.getAccumuloConnector(), RYA_INSTANCE_NAME); + PeriodicNotificationClient notificationClient = new KafkaNotificationRegistrationClient(topic, + getNotificationProducer("localhost:9092")); + + CreatePeriodicQuery periodicPCJ = new CreatePeriodicQuery(fluoClient, storage); + String id = periodicPCJ.createPeriodicQuery(query, notificationClient).getQueryId(); + + loadData(statements); + + // Ensure the data was loaded. + final List rows = getFluoTableEntries(fluoClient); + assertEquals(expectedEntries, rows.size()); + + DeletePeriodicQuery deletePeriodic = new DeletePeriodicQuery(fluoClient, storage); + deletePeriodic.deletePeriodicQuery(FluoQueryUtils.convertFluoQueryIdToPcjId(id), notificationClient); + + // Ensure all data related to the query has been removed. 
+ final List empty_rows = getFluoTableEntries(fluoClient); + assertEquals(0, empty_rows.size()); + + // Ensure that Periodic Service notified to add and delete PeriodicNotification + Set notifications; + try (KafkaConsumer consumer = makeNotificationConsumer(topic)) { + notifications = getKafkaNotifications(topic, 7000, consumer); + } + assertEquals(2, notifications.size()); + + String notificationId = ""; + boolean addCalled = false; + boolean deleteCalled = false; + for (CommandNotification notification : notifications) { + if (notificationId.length() == 0) { + notificationId = notification.getId(); + } else { + assertEquals(notificationId, notification.getId()); + } + + if (notification.getCommand() == Command.ADD) { + addCalled = true; + } + + if (notification.getCommand() == Command.DELETE) { + deleteCalled = true; + } + } + + assertEquals(true, addCalled); + assertEquals(true, deleteCalled); + } + } + + private List getFluoTableEntries(final FluoClient fluoClient) { + try (Snapshot snapshot = fluoClient.newSnapshot()) { + final List rows = new ArrayList<>(); + final RowScanner rscanner = snapshot.scanner().over(Span.prefix("")).byRow().build(); + + for (final ColumnScanner cscanner : rscanner) { + rows.add(cscanner.getRow()); + } + + return rows; + } + } + + private KafkaProducer getNotificationProducer(String bootStrapServers) { + Properties props = new Properties(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, CommandNotificationSerializer.class.getName()); + return new KafkaProducer<>(props); + } + + private KafkaConsumer makeNotificationConsumer(final String topic) { + // setup consumer + final Properties consumerProps = new Properties(); + consumerProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group0"); + consumerProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0"); + consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); + consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CommandNotificationSerializer.class.getName()); + + // to make sure the consumer starts from the beginning of the topic + consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + + final KafkaConsumer consumer = new KafkaConsumer<>(consumerProps); + consumer.subscribe(Arrays.asList(topic)); + return consumer; + } + + private Set getKafkaNotifications(String topic, int pollTime, + KafkaConsumer consumer) { + requireNonNull(topic); + + // Read all of the results from the Kafka topic. 
+ final Set results = new HashSet<>(); + + final ConsumerRecords records = consumer.poll(pollTime); + final Iterator> recordIterator = records.iterator(); + while (recordIterator.hasNext()) { + results.add(recordIterator.next().value()); + } + + return results; + } + +} diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java index 8911f564c..dbedfb364 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java @@ -90,7 +90,7 @@ public void newResultsExportedTest() throws Exception { vf.createStatement(vf.createURI("http://Frank"), vf.createURI("http://worksAt"), vf.createURI("http://Chipotle"))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); FluoITHelper.printFluoTable(super.getFluoConfiguration()); @@ -136,7 +136,7 @@ public void min() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -163,7 +163,7 @@ public void max() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -194,7 +194,7 @@ public void count() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(3.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -221,7 +221,7 @@ public void sum() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:count"), vf.createLiteral(2))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -248,7 +248,7 @@ public void average() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(8))); // Create the PCJ in Fluo and load the statements into Rya. 
- final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); try(FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration())) { FluoITHelper.printFluoTable(fluo); @@ -280,7 +280,7 @@ public void aggregateWithFilter() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -307,7 +307,7 @@ public void multipleAggregations() throws Exception { vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(2.75))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final MapBindingSet expectedResult = new MapBindingSet(); @@ -338,7 +338,7 @@ public void groupBySingleBinding() throws Exception { vf.createStatement(vf.createURI("urn:banana"), vf.createURI("urn:price"), vf.createLiteral(1.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final Set expectedResults = new HashSet<>(); @@ -399,7 +399,7 @@ public void groupByManyBindings_averages() throws Exception { vf.createStatement(vf.createURI("urn:6"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final Set expectedResults = new HashSet<>(); @@ -477,7 +477,7 @@ public void nestedGroupByManyBindings_averages() throws Exception { vf.createStatement(vf.createURI("urn:6"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. final Set expectedResults = new HashSet<>(); @@ -554,7 +554,7 @@ public void nestedWithJoinGroupByManyBindings_averages() throws Exception { vf.createStatement(vf.createURI("urn:6"), vf.createURI("urn:price"), vf.createLiteral(4.99))); // Create the PCJ in Fluo and load the statements into Rya. - final String pcjId = loadData(sparql, statements); + final String pcjId = loadDataAndCreateQuery(sparql, statements); // Create the expected results of the SPARQL query once the PCJ has been computed. 
final Set expectedResults = new HashSet<>(); diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java index 0aefacaca..4974aee9d 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java +++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java @@ -70,10 +70,6 @@ */ public class QueryIT extends RyaExportITBase { - private enum ExporterType { - Pcj, Periodic - }; - @Test public void optionalStatements() throws Exception { // A query that has optional statement patterns. This query is looking for all @@ -112,7 +108,7 @@ public void optionalStatements() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } /** @@ -187,7 +183,7 @@ public void complexQuery() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } @Test @@ -241,7 +237,7 @@ public void withURIFilters() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } @Test @@ -278,7 +274,7 @@ public void withNumericFilters() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } @Test @@ -359,7 +355,7 @@ public Value evaluate(final ValueFactory valueFactory, final Value... args) thro expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } @Test @@ -424,7 +420,7 @@ public void withTemporal() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(sparql, statements, expectedResults, ExporterType.Pcj); + runTest(sparql, statements, expectedResults, ExportStrategy.RYA); } @Test @@ -525,7 +521,7 @@ public void periodicQueryTestWithoutAggregation() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } @Test @@ -596,7 +592,7 @@ public void periodicQueryTestWithAggregation() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } @Test @@ -713,7 +709,7 @@ public void periodicQueryTestWithAggregationAndGroupBy() throws Exception { expectedResults.add(bs); // Verify the end results of the query match the expected results. 
- runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } @@ -792,7 +788,7 @@ public void nestedPeriodicQueryTestWithAggregationAndGroupBy() throws Exception expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } @Test @@ -876,7 +872,7 @@ public void nestedJoinPeriodicQueryWithAggregationAndGroupBy() throws Exception expectedResults.add(bs); // Verify the end results of the query match the expected results. - runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } @Test(expected= UnsupportedQueryException.class) @@ -896,11 +892,11 @@ public void nestedConstructPeriodicQueryWithAggregationAndGroupBy() throws Excep final Set expectedResults = new HashSet<>(); // Verify the end results of the query match the expected results. - runTest(query, statements, expectedResults, ExporterType.Periodic); + runTest(query, statements, expectedResults, ExportStrategy.PERIODIC); } public void runTest(final String sparql, final Collection statements, final Collection expectedResults, - ExporterType type) throws Exception { + ExportStrategy strategy) throws Exception { requireNonNull(sparql); requireNonNull(statements); requireNonNull(expectedResults); @@ -910,8 +906,8 @@ public void runTest(final String sparql, final Collection statements, final RyaClient ryaClient = AccumuloRyaClientFactory.build(createConnectionDetails(), accumuloConn); - switch (type) { - case Pcj: + switch (strategy) { + case RYA: ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql); addStatementsAndWait(statements); // Fetch the value that is stored within the PCJ table. 
@@ -922,11 +918,11 @@ public void runTest(final String sparql, final Collection statements, assertEquals(expectedResults, results); } break; - case Periodic: + case PERIODIC: PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(accumuloConn, getRyaInstanceName()); String periodicId = periodicStorage.createPeriodicQuery(sparql); try (FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration())) { - new CreateFluoPcj().createPcj(periodicId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluo); + new CreateFluoPcj().createPcj(periodicId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluo); } addStatementsAndWait(statements); @@ -938,6 +934,8 @@ public void runTest(final String sparql, final Collection statements, } assertEquals(expectedResults, results); break; + default: + throw new RuntimeException("Invalid export option"); } } diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java index ed9ce60a7..59fe54fd2 100644 --- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java +++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java @@ -321,7 +321,7 @@ protected KafkaConsumer makeConsumer(final String return consumer; } - protected String loadData(final String sparql, final Collection statements) throws Exception { + protected String loadDataAndCreateQuery(final String sparql, final Collection statements) throws Exception { requireNonNull(sparql); requireNonNull(statements); @@ -334,7 +334,16 @@ protected String loadData(final String sparql, final Collection state final String pcjId = ryaClient.getCreatePCJ().createPCJ(RYA_INSTANCE_NAME, sparql, Sets.newHashSet(ExportStrategy.KAFKA)); - // Write the data to Rya. + loadData(statements); + + // The PCJ Id is the topic name the results will be written to. + return pcjId; + } + + protected void loadData(final Collection statements) throws Exception { + + requireNonNull(statements); + final SailRepositoryConnection ryaConn = getRyaSailRepository().getConnection(); ryaConn.begin(); ryaConn.add(statements); @@ -343,9 +352,7 @@ protected String loadData(final String sparql, final Collection state // Wait for the Fluo application to finish computing the end result. super.getMiniFluo().waitForObservers(); - - // The PCJ Id is the topic name the results will be written to. 
- return pcjId; + } } diff --git a/extras/rya.periodic.service/periodic.service.api/.gitignore b/extras/rya.periodic.service/periodic.service.api/.gitignore new file mode 100644 index 000000000..b83d22266 --- /dev/null +++ b/extras/rya.periodic.service/periodic.service.api/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/extras/rya.periodic.service/periodic.service.api/pom.xml b/extras/rya.periodic.service/periodic.service.api/pom.xml new file mode 100644 index 000000000..b57beafbb --- /dev/null +++ b/extras/rya.periodic.service/periodic.service.api/pom.xml @@ -0,0 +1,52 @@ + + + + 4.0.0 + + org.apache.rya + rya.periodic.service + 3.2.11-incubating-SNAPSHOT + + + rya.periodic.service.api + + Apache Rya Periodic Service API + API for Periodic Service Application + + + + + com.google.code.gson + gson + 2.8.0 + + + junit + junit + test + + + org.openrdf.sesame + sesame-query + + + org.apache.kafka + kafka-clients + + + org.apache.rya + rya.indexing.pcj + + + + \ No newline at end of file diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java similarity index 95% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java index 571ee1ccd..f4a083c50 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java +++ b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BinPruner.java @@ -18,8 +18,8 @@ */ package org.apache.rya.periodic.notification.api; -import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants; import org.openrdf.query.Binding; +import org.openrdf.query.BindingSet; /** * Object that cleans up old {@link BindingSet}s corresponding to the specified diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java similarity index 86% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java index 500a435cd..491576b43 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java +++ b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetExporter.java @@ -18,8 +18,7 @@ */ package org.apache.rya.periodic.notification.api; -import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; +import org.openrdf.query.BindingSet; /** * An Object that is used to export {@link BindingSet}s to an external repository or queuing system. 
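To illustrate the refactored exporter contract (the exportNotification signature change appears in the next hunk), here is a minimal hypothetical implementation that uses only types introduced by this patch and assumes exportNotification is the interface's single method; it collects records in memory and wraps failures in the new checked exception:

import java.util.ArrayList;
import java.util.List;

import org.apache.rya.periodic.notification.api.BindingSetExporter;
import org.apache.rya.periodic.notification.api.BindingSetRecord;
import org.apache.rya.periodic.notification.api.BindingSetRecordExportException;

// Illustrative only: stores exported records in memory so the contract is easy to see.
public class InMemoryBindingSetExporter implements BindingSetExporter {

    private final List<BindingSetRecord> exported = new ArrayList<>();

    @Override
    public void exportNotification(BindingSetRecord record) throws BindingSetRecordExportException {
        if (record == null) {
            throw new BindingSetRecordExportException("Cannot export a null BindingSetRecord.");
        }
        exported.add(record);
    }

    public List<BindingSetRecord> getExported() {
        return exported;
    }
}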
@@ -33,6 +32,6 @@ public interface BindingSetExporter { * @param bindingSet - {@link BindingSet} to be exported * @throws ResultExportException */ - public void exportNotification(BindingSetRecord bindingSet) throws ResultExportException; + public void exportNotification(BindingSetRecord bindingSet) throws BindingSetRecordExportException; } diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/BindingSetRecord.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecord.java similarity index 97% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/BindingSetRecord.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecord.java index 471b02190..c3f70f1d3 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/BindingSetRecord.java +++ b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecord.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.rya.periodic.notification.exporter; +package org.apache.rya.periodic.notification.api; import org.openrdf.query.BindingSet; diff --git a/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecordExportException.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecordExportException.java new file mode 100644 index 000000000..94e498049 --- /dev/null +++ b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/BindingSetRecordExportException.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.rya.periodic.notification.api; + +/** + * A result could not be exported. + */ +public class BindingSetRecordExportException extends Exception { + private static final long serialVersionUID = 1L; + + /** + * Constructs an instance of {@link BindingSetRecordExportException}. + * + * @param message - Explains why the exception was thrown. + */ + public BindingSetRecordExportException(final String message) { + super(message); + } + + /** + * Constructs an instance of {@link BindingSetRecordExportException}. + * + * @param message - Explains why the exception was thrown. + * @param cause - The exception that caused this one to be thrown. 
+ */ + public BindingSetRecordExportException(final String message, final Throwable cause) { + super(message, cause); + } +} diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/LifeCycle.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/LifeCycle.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/LifeCycle.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/LifeCycle.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NodeBin.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NodeBin.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NodeBin.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NodeBin.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/Notification.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/Notification.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/Notification.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/Notification.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NotificationCoordinatorExecutor.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NotificationCoordinatorExecutor.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NotificationCoordinatorExecutor.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NotificationCoordinatorExecutor.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NotificationProcessor.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NotificationProcessor.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/NotificationProcessor.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/NotificationProcessor.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/PeriodicNotificationClient.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/PeriodicNotificationClient.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/PeriodicNotificationClient.java rename to 
extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/api/PeriodicNotificationClient.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/BasicNotification.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/BasicNotification.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/BasicNotification.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/BasicNotification.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/CommandNotification.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/CommandNotification.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/CommandNotification.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/CommandNotification.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/PeriodicNotification.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/PeriodicNotification.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/PeriodicNotification.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/PeriodicNotification.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/TimestampedNotification.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/TimestampedNotification.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/notification/TimestampedNotification.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/notification/TimestampedNotification.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/registration/kafka/KafkaNotificationRegistrationClient.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/registration/KafkaNotificationRegistrationClient.java similarity index 97% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/registration/kafka/KafkaNotificationRegistrationClient.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/registration/KafkaNotificationRegistrationClient.java index ec94bb78c..bb438be74 100644 --- 
a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/registration/kafka/KafkaNotificationRegistrationClient.java +++ b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/registration/KafkaNotificationRegistrationClient.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.rya.periodic.notification.registration.kafka; +package org.apache.rya.periodic.notification.registration; import java.util.concurrent.TimeUnit; diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/BasicNotificationTypeAdapter.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/BasicNotificationTypeAdapter.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/BasicNotificationTypeAdapter.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/BasicNotificationTypeAdapter.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationSerializer.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationSerializer.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationSerializer.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationSerializer.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationTypeAdapter.java b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationTypeAdapter.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationTypeAdapter.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/CommandNotificationTypeAdapter.java diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/PeriodicNotificationTypeAdapter.java 
b/extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/PeriodicNotificationTypeAdapter.java similarity index 100% rename from extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/serialization/PeriodicNotificationTypeAdapter.java rename to extras/rya.periodic.service/periodic.service.api/src/main/java/org/apache/rya/periodic/notification/serialization/PeriodicNotificationTypeAdapter.java diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml b/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml index 20a064788..402f81d3e 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml +++ b/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml @@ -1,22 +1,14 @@ - + 4.0.0 @@ -54,7 +46,6 @@ under the License. org.apache.rya rya.periodic.service.notification - ${project.version} logback-classic diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java index cb7557c53..91097750c 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java @@ -21,7 +21,6 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.nio.file.Files; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; @@ -34,15 +33,16 @@ import java.util.Optional; import java.util.Properties; import java.util.Set; +import java.util.UUID; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; -import org.I0Itec.zkclient.ZkClient; import org.apache.accumulo.core.client.Connector; import org.apache.fluo.api.client.FluoClient; import org.apache.fluo.api.config.FluoConfiguration; import org.apache.fluo.core.client.FluoClientImpl; +import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -52,21 +52,27 @@ import org.apache.kafka.common.serialization.StringSerializer; import org.apache.rya.api.resolver.RdfToRyaConversions; import org.apache.rya.indexing.accumulo.ConfigUtils; +import org.apache.rya.indexing.pcj.fluo.api.CreatePeriodicQuery; import org.apache.rya.indexing.pcj.fluo.api.InsertTriples; import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants; import org.apache.rya.indexing.pcj.fluo.app.util.FluoClientFactory; +import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils; import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.CloseableIterator; import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; +import org.apache.rya.kafka.base.EmbeddedKafkaInstance; +import org.apache.rya.kafka.base.EmbeddedKafkaSingleton; +import 
org.apache.rya.kafka.base.KafkaTestInstanceRule; import org.apache.rya.pcj.fluo.test.base.RyaExportITBase; -import org.apache.rya.periodic.notification.api.CreatePeriodicQuery; import org.apache.rya.periodic.notification.notification.CommandNotification; -import org.apache.rya.periodic.notification.registration.kafka.KafkaNotificationRegistrationClient; +import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient; import org.apache.rya.periodic.notification.serialization.BindingSetSerDe; import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer; import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; import org.junit.Test; import org.openrdf.model.Statement; import org.openrdf.model.Value; @@ -81,14 +87,9 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; -import kafka.server.KafkaConfig; -import kafka.server.KafkaServer; -import kafka.utils.MockTime; -import kafka.utils.TestUtils; -import kafka.utils.Time; -import kafka.utils.ZKStringSerializer$; -import kafka.utils.ZkUtils; -import kafka.zk.EmbeddedZookeeper; +import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.NOTIFICATION_TOPIC; +import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.KAFKA_BOOTSTRAP_SERVERS;; + public class PeriodicNotificationApplicationIT extends RyaExportITBase { @@ -97,45 +98,38 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase { private KafkaProducer producer; private Properties props; private Properties kafkaProps; - PeriodicNotificationApplicationConfiguration conf; + private PeriodicNotificationApplicationConfiguration conf; + private static EmbeddedKafkaInstance embeddedKafka = EmbeddedKafkaSingleton.getInstance(); + private static String bootstrapServers; + + @Rule + public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false); - private static final String ZKHOST = "127.0.0.1"; - private static final String BROKERHOST = "127.0.0.1"; - private static final String BROKERPORT = "9092"; - private ZkUtils zkUtils; - private KafkaServer kafkaServer; - private EmbeddedZookeeper zkServer; - private ZkClient zkClient; + @BeforeClass + public static void initClass() { + bootstrapServers = embeddedKafka.createBootstrapServerConfig().getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG); + } @Before public void init() throws Exception { - setUpKafka(); + String topic = rule.getKafkaTopicName(); + rule.createTopic(topic); + + //get user specified props and update with the embedded kafka bootstrap servers and rule generated topic props = getProps(); + props.setProperty(NOTIFICATION_TOPIC, topic); + props.setProperty(KAFKA_BOOTSTRAP_SERVERS, bootstrapServers); conf = new PeriodicNotificationApplicationConfiguration(props); + + //create Kafka Producer kafkaProps = getKafkaProperties(conf); - app = PeriodicNotificationApplicationFactory.getPeriodicApplication(props); producer = new KafkaProducer<>(kafkaProps, new StringSerializer(), new CommandNotificationSerializer()); + + //extract kafka specific properties from application config + app = PeriodicNotificationApplicationFactory.getPeriodicApplication(props); registrar = new KafkaNotificationRegistrationClient(conf.getNotificationTopic(), producer); } - private void setUpKafka() throws Exception { - // Setup Kafka. 
- zkServer = new EmbeddedZookeeper(); - final String zkConnect = ZKHOST + ":" + zkServer.port(); - zkClient = new ZkClient(zkConnect, 30000, 30000, ZKStringSerializer$.MODULE$); - zkUtils = ZkUtils.apply(zkClient, false); - - // setup Brokersparql - final Properties brokerProps = new Properties(); - brokerProps.setProperty("zookeeper.connect", zkConnect); - brokerProps.setProperty("broker.id", "0"); - brokerProps.setProperty("log.dirs", Files.createTempDirectory("kafka-").toAbsolutePath().toString()); - brokerProps.setProperty("listeners", "PLAINTEXT://" + BROKERHOST + ":" + BROKERPORT); - final KafkaConfig config = new KafkaConfig(brokerProps); - final Time mock = new MockTime(); - kafkaServer = TestUtils.createServer(config, mock); - } - @Test public void periodicApplicationWithAggAndGroupByTest() throws Exception { @@ -185,10 +179,10 @@ public void periodicApplicationWithAggAndGroupByTest() throws Exception { Connector connector = ConfigUtils.getConnector(conf); PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix()); CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage); - String id = periodicQuery.createQueryAndRegisterWithKafka(sparql, registrar); + String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId()); addData(statements); app.start(); -// + Multimap actual = HashMultimap.create(); try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) { consumer.subscribe(Arrays.asList(id)); @@ -321,10 +315,10 @@ public void periodicApplicationWithAggTest() throws Exception { Connector connector = ConfigUtils.getConnector(conf); PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix()); CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage); - String id = periodicQuery.createQueryAndRegisterWithKafka(sparql, registrar); + String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId()); addData(statements); app.start(); -// + Multimap expected = HashMultimap.create(); try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) { consumer.subscribe(Arrays.asList(id)); @@ -411,10 +405,10 @@ public void periodicApplicationTest() throws Exception { Connector connector = ConfigUtils.getConnector(conf); PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix()); CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage); - String id = periodicQuery.createQueryAndRegisterWithKafka(sparql, registrar); + String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId()); addData(statements); app.start(); -// + Multimap expected = HashMultimap.create(); try (KafkaConsumer consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) { consumer.subscribe(Arrays.asList(id)); @@ -458,13 +452,6 @@ public void periodicApplicationTest() throws Exception { public void shutdown() { registrar.close(); app.stop(); - teardownKafka(); - } - - private void teardownKafka() { - kafkaServer.shutdown(); - zkClient.close(); - zkServer.shutdown(); } private void addData(Collection statements) throws DatatypeConfigurationException { @@ -473,20 +460,17 @@ private void addData(Collection statements) throws 
DatatypeConfigurat InsertTriples inserter = new InsertTriples(); statements.forEach(x -> inserter.insert(fluo, RdfToRyaConversions.convertStatement(x))); getMiniFluo().waitForObservers(); -// FluoITHelper.printFluoTable(fluo); } - } - private Properties getKafkaProperties(PeriodicNotificationApplicationConfiguration conf) { + private static Properties getKafkaProperties(PeriodicNotificationApplicationConfiguration conf) { Properties kafkaProps = new Properties(); - kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, conf.getBootStrapServers()); - kafkaProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, conf.getNotificationClientId()); + kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + kafkaProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString()); kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, conf.getNotificationGroupId()); kafkaProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); return kafkaProps; } - private Properties getProps() throws IOException { diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java index cf24974eb..e05ca6f2a 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java @@ -31,10 +31,11 @@ import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor; import org.apache.rya.periodic.notification.notification.TimestampedNotification; import org.apache.rya.periodic.notification.recovery.PeriodicNotificationProvider; +import org.junit.Assert; import org.junit.Test; import org.openrdf.query.MalformedQueryException; -import org.junit.Assert; +import com.google.common.collect.Sets; public class PeriodicNotificationProviderIT extends AccumuloExportITBase { @@ -55,7 +56,7 @@ public void testProvider() throws MalformedQueryException, InterruptedException, String id = null; try(FluoClient fluo = new FluoClientImpl(getFluoConfiguration())) { - id = pcj.createPcj(sparql, fluo).getQueryId(); + id = pcj.createPcj(FluoQueryUtils.createNewPcjId(), sparql, Sets.newHashSet(), fluo).getQueryId(); provider.processRegisteredNotifications(coord, fluo.newSnapshot()); } diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java index c5dc809d7..874e7e279 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java @@ -36,6 +36,7 @@ import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; import org.apache.rya.kafka.base.KafkaITBase; import 
org.apache.rya.kafka.base.KafkaTestInstanceRule; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.serialization.BindingSetSerDe; import org.junit.Assert; import org.junit.Rule; diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/processor/PeriodicNotificationProcessorIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/processor/PeriodicNotificationProcessorIT.java index fa60e4872..21109ae6e 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/processor/PeriodicNotificationProcessorIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/processor/PeriodicNotificationProcessorIT.java @@ -30,8 +30,8 @@ import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder; import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.NodeBin; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; import org.apache.rya.periodic.notification.notification.PeriodicNotification; import org.apache.rya.periodic.notification.notification.TimestampedNotification; import org.junit.Assert; diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java index bb98b7f82..830fa4687 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java @@ -39,9 +39,11 @@ import org.apache.fluo.api.data.Span; import org.apache.fluo.core.client.FluoClientImpl; import org.apache.rya.api.resolver.RdfToRyaConversions; +import org.apache.rya.indexing.pcj.fluo.api.CreatePeriodicQuery; import org.apache.rya.indexing.pcj.fluo.api.InsertTriples; import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants; import org.apache.rya.indexing.pcj.fluo.app.NodeType; +import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils; import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil; import org.apache.rya.indexing.pcj.fluo.app.util.RowKeyUtil; import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; @@ -50,9 +52,7 @@ import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder; import org.apache.rya.pcj.fluo.test.base.RyaExportITBase; -import org.apache.rya.periodic.notification.api.CreatePeriodicQuery; import org.apache.rya.periodic.notification.api.NodeBin; -import org.apache.rya.periodic.notification.notification.PeriodicNotification; import org.junit.Assert; import org.junit.Test; import org.openrdf.model.Statement; @@ -85,8 +85,7 @@ public void periodicPrunerTest() throws Exception { 
PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(super.getAccumuloConnector(), getRyaInstanceName()); CreatePeriodicQuery createPeriodicQuery = new CreatePeriodicQuery(fluo, periodicStorage); - PeriodicNotification notification = createPeriodicQuery.createPeriodicQuery(sparql); - String queryId = notification.getId(); + String queryId = FluoQueryUtils.convertFluoQueryIdToPcjId(createPeriodicQuery.createPeriodicQuery(sparql).getQueryId()); // create statements to ingest into Fluo final ValueFactory vf = new ValueFactoryImpl(); diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java index bde406f74..522e69d2b 100644 --- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java +++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java @@ -18,31 +18,44 @@ */package org.apache.rya.periodic.notification.registration.kafka; import java.util.Properties; +import java.util.UUID; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.log4j.BasicConfigurator; -import org.apache.rya.pcj.fluo.test.base.KafkaExportITBase; +import org.apache.rya.kafka.base.KafkaITBase; +import org.apache.rya.kafka.base.KafkaTestInstanceRule; import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor; import org.apache.rya.periodic.notification.notification.CommandNotification; import org.apache.rya.periodic.notification.notification.TimestampedNotification; +import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient; import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer; -import org.junit.After; import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; import org.junit.Test; -public class PeriodicCommandNotificationConsumerIT extends KafkaExportITBase { +public class PeriodicCommandNotificationConsumerIT extends KafkaITBase { - private static final String topic = "topic"; private KafkaNotificationRegistrationClient registration; private PeriodicNotificationCoordinatorExecutor coord; private KafkaNotificationProvider provider; + private String bootstrapServer; + + @Rule + public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false); + + @Before + public void init() throws Exception { + bootstrapServer = createBootstrapServerConfig().getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG); + } @Test public void kafkaNotificationProviderTest() throws InterruptedException { @@ -52,6 +65,9 @@ public void kafkaNotificationProviderTest() throws InterruptedException { BlockingQueue 
notifications = new LinkedBlockingQueue<>(); Properties props = createKafkaConfig(); KafkaProducer producer = new KafkaProducer<>(props); + String topic = rule.getKafkaTopicName(); + rule.createTopic(topic); + registration = new KafkaNotificationRegistrationClient(topic, producer); coord = new PeriodicNotificationCoordinatorExecutor(1, notifications); provider = new KafkaNotificationProvider(topic, new StringDeserializer(), new CommandNotificationSerializer(), props, coord, 1); @@ -80,6 +96,9 @@ public void kafkaNotificationMillisProviderTest() throws InterruptedException { BlockingQueue notifications = new LinkedBlockingQueue<>(); Properties props = createKafkaConfig(); KafkaProducer producer = new KafkaProducer<>(props); + String topic = rule.getKafkaTopicName(); + rule.createTopic(topic); + registration = new KafkaNotificationRegistrationClient(topic, producer); coord = new PeriodicNotificationCoordinatorExecutor(1, notifications); provider = new KafkaNotificationProvider(topic, new StringDeserializer(), new CommandNotificationSerializer(), props, coord, 1); @@ -108,8 +127,8 @@ private void tearDown() { private Properties createKafkaConfig() { Properties props = new Properties(); - props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); - props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group0"); + props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer); + props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString()); props.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0"); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); diff --git a/extras/rya.periodic.service/periodic.service.notification/pom.xml b/extras/rya.periodic.service/periodic.service.notification/pom.xml index 217388859..1e59e1554 100644 --- a/extras/rya.periodic.service/periodic.service.notification/pom.xml +++ b/extras/rya.periodic.service/periodic.service.notification/pom.xml @@ -1,107 +1,112 @@ - 4.0.0 - - - org.apache.rya - rya.periodic.service - 3.2.11-incubating-SNAPSHOT - + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + + org.apache.rya + rya.periodic.service + 3.2.11-incubating-SNAPSHOT + - rya.periodic.service.notification - - Apache Rya Periodic Service Notification + rya.periodic.service.notification + + Apache Rya Periodic Service Notification Notifications for Rya Periodic Service - + + + + org.apache.twill + twill-api + 0.11.0 + + + org.apache.twill + twill-yarn + 0.11.0 + + + kafka_2.10 + org.apache.kafka + + + + + com.google.code.gson + gson + 2.8.0 + + + junit + junit + test + + + org.apache.fluo + fluo-api + + + org.apache.fluo + fluo-core + + + org.apache.rya + rya.indexing + + + org.openrdf.sesame + sesame-query + + + org.apache.rya + rya.indexing.pcj + + + org.apache.rya + rya.pcj.fluo.app + + + org.apache.rya + rya.periodic.service.api + - - org.apache.twill - twill-api - 0.11.0 - - - org.apache.twill - twill-yarn - 0.11.0 - - - kafka_2.10 - org.apache.kafka - - - - - com.google.code.gson - gson - 2.8.0 - compile - - - junit - junit - - - org.apache.fluo - fluo-api - - - org.apache.fluo - fluo-core - - - org.apache.rya - rya.indexing - - - org.openrdf.sesame - sesame-query - - - org.apache.rya - rya.indexing.pcj - - - org.apache.rya - rya.pcj.fluo.app - - + - - - - org.apache.maven.plugins - maven-compiler-plugin - - UTF-8 - 1.8 - 1.8 - - - - 
org.apache.maven.plugins - maven-shade-plugin - 3.0.0 - - - package - - shade - - - - - - + + + + org.apache.maven.plugins + maven-compiler-plugin + + UTF-8 + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-shade-plugin + 3.0.0 + + + package + + shade + + + + + + \ No newline at end of file diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java deleted file mode 100644 index 60a3e7c02..000000000 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.rya.periodic.notification.api; - -import java.util.Optional; - -import org.apache.fluo.api.client.FluoClient; -import org.apache.rya.api.client.CreatePCJ.ExportStrategy; -import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj; -import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryNode; -import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException; -import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils; -import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil; -import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; -import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException; -import org.apache.rya.periodic.notification.application.PeriodicNotificationApplication; -import org.apache.rya.periodic.notification.notification.PeriodicNotification; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.evaluation.function.Function; - -import com.google.common.collect.Sets; - -/** - * Object that creates a Periodic Query. A Periodic Query is any query - * requesting periodic updates about events that occurred within a given - * window of time of this instant. This is also known as a rolling window - * query. Period Queries can be expressed using SPARQL by including the - * {@link Function} indicated by the URI {@link PeriodicQueryUtil#PeriodicQueryURI} - * in the query. The user must provide this Function with the following arguments: - * the temporal variable in the query that will be filtered on, the window of time - * that events must occur within, the period at which the user wants to receive updates, - * and the time unit. The following query requests all observations that occurred - * within the last minute and requests updates every 15 seconds. It also performs - * a count on those observations. - *
<li>
- * <li> prefix function: http://org.apache.rya/function#
- * <li> "prefix time: http://www.w3.org/2006/time#
- * <li> "select (count(?obs) as ?total) where {
- * <li> "Filter(function:periodic(?time, 1, .25, time:minutes))
- * <li> "?obs <uri:hasTime> ?time.
- * <li> "?obs <uri:hasId> ?id }
- *
  • - * - * This class is responsible for taking a Periodic Query expressed as a SPARQL query - * and adding to Fluo and Kafka so that it can be processed by the {@link PeriodicNotificationApplication}. - */ -public class CreatePeriodicQuery { - - private FluoClient fluoClient; - private PeriodicQueryResultStorage periodicStorage; - Function funciton; - PeriodicQueryUtil util; - - - public CreatePeriodicQuery(FluoClient fluoClient, PeriodicQueryResultStorage periodicStorage) { - this.fluoClient = fluoClient; - this.periodicStorage = periodicStorage; - } - - /** - * Creates a Periodic Query by adding the query to Fluo and using the resulting - * Fluo id to create a {@link PeriodicQueryResultStorage} table. - * @param sparql - sparql query registered to Fluo whose results are stored in PeriodicQueryResultStorage table - * @return PeriodicNotification that can be used to register register this query with the {@link PeriodicNotificationApplication}. - */ - public PeriodicNotification createPeriodicQuery(String sparql) { - try { - Optional optNode = PeriodicQueryUtil.getPeriodicNode(sparql); - if(optNode.isPresent()) { - PeriodicQueryNode periodicNode = optNode.get(); - String pcjId = FluoQueryUtils.createNewPcjId(); - - //register query with Fluo - CreateFluoPcj createPcj = new CreateFluoPcj(); - createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluoClient); - - //register query with PeriodicResultStorage table - periodicStorage.createPeriodicQuery(pcjId, sparql); - //create notification - PeriodicNotification notification = PeriodicNotification.builder().id(pcjId).period(periodicNode.getPeriod()) - .timeUnit(periodicNode.getUnit()).build(); - return notification; - } else { - throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter."); - } - } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) { - throw new RuntimeException(e); - } - } - - /** - * Creates a Periodic Query by adding the query to Fluo and using the resulting - * Fluo id to create a {@link PeriodicQueryResultStorage} table. In addition, this - * method registers the PeriodicQuery with the PeriodicNotificationApplication to poll - * the PeriodicQueryResultStorage table at regular intervals and export results to Kafka. - * The PeriodicNotificationApp queries the result table at a regular interval indicated by the Period of - * the PeriodicQuery. 
- * @param sparql - sparql query registered to Fluo whose results are stored in PeriodicQueryResultStorage table - * @param PeriodicNotificationClient - registers the PeriodicQuery with the {@link PeriodicNotificationApplication} - * @return id of the PeriodicQuery and PeriodicQueryResultStorage table (these are the same) - */ - public String createQueryAndRegisterWithKafka(String sparql, PeriodicNotificationClient periodicClient) { - PeriodicNotification notification = createPeriodicQuery(sparql); - periodicClient.addNotification(notification); - return notification.getId(); - } - -} diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplication.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplication.java index 6dd712682..92a7d18ac 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplication.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplication.java @@ -21,10 +21,10 @@ import org.apache.log4j.Logger; import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil; import org.apache.rya.periodic.notification.api.BinPruner; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.LifeCycle; import org.apache.rya.periodic.notification.api.NodeBin; import org.apache.rya.periodic.notification.api.NotificationCoordinatorExecutor; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; import org.apache.rya.periodic.notification.exporter.KafkaExporterExecutor; import org.apache.rya.periodic.notification.processor.NotificationProcessorExecutor; import org.apache.rya.periodic.notification.pruner.PeriodicQueryPrunerExecutor; diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationFactory.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationFactory.java index 248b2bfd5..771a4abf9 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationFactory.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationFactory.java @@ -38,10 +38,10 @@ import org.apache.rya.indexing.pcj.fluo.app.util.FluoClientFactory; import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.NodeBin; import org.apache.rya.periodic.notification.api.NotificationCoordinatorExecutor; import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; import org.apache.rya.periodic.notification.exporter.KafkaExporterExecutor; import org.apache.rya.periodic.notification.notification.TimestampedNotification; import 
org.apache.rya.periodic.notification.processor.NotificationProcessorExecutor; diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java index 488001548..c2e5ebf10 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java @@ -28,6 +28,7 @@ import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.log4j.Logger; import org.apache.rya.periodic.notification.api.BindingSetExporter; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.LifeCycle; import org.openrdf.query.BindingSet; diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java index 9baede3c6..8a0322fdf 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java @@ -30,8 +30,9 @@ import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.log4j.Logger; import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants; -import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException; import org.apache.rya.periodic.notification.api.BindingSetExporter; +import org.apache.rya.periodic.notification.api.BindingSetRecord; +import org.apache.rya.periodic.notification.api.BindingSetRecordExportException; import org.openrdf.model.Literal; import org.openrdf.query.BindingSet; @@ -64,7 +65,7 @@ public KafkaPeriodicBindingSetExporter(KafkaProducer produce * the indicated BindingSetRecord and the BindingSet is then exported to the topic. 
*/ @Override - public void exportNotification(BindingSetRecord record) throws ResultExportException { + public void exportNotification(BindingSetRecord record) throws BindingSetRecordExportException { String bindingName = IncrementalUpdateConstants.PERIODIC_BIN_ID; BindingSet bindingSet = record.getBindingSet(); String topic = record.getTopic(); @@ -75,7 +76,7 @@ public void exportNotification(BindingSetRecord record) throws ResultExportExcep //wait for confirmation that results have been received future.get(5, TimeUnit.SECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { - throw new ResultExportException(e.getMessage()); + throw new BindingSetRecordExportException(e.getMessage()); } } @@ -85,7 +86,7 @@ public void run() { while (!closed.get()) { exportNotification(bindingSets.take()); } - } catch (InterruptedException | ResultExportException e) { + } catch (InterruptedException | BindingSetRecordExportException e) { log.trace("Thread " + threadNumber + " is unable to process message."); } } diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/NotificationProcessorExecutor.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/NotificationProcessorExecutor.java index a363d5da0..a9a5ad1e4 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/NotificationProcessorExecutor.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/NotificationProcessorExecutor.java @@ -27,9 +27,9 @@ import org.apache.log4j.Logger; import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.LifeCycle; import org.apache.rya.periodic.notification.api.NodeBin; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; import org.apache.rya.periodic.notification.notification.TimestampedNotification; import com.google.common.base.Preconditions; diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/TimestampedNotificationProcessor.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/TimestampedNotificationProcessor.java index baeb61139..8b65683b4 100644 --- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/TimestampedNotificationProcessor.java +++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/processor/TimestampedNotificationProcessor.java @@ -26,9 +26,9 @@ import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage; import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.CloseableIterator; import org.apache.rya.periodic.notification.api.BinPruner; +import org.apache.rya.periodic.notification.api.BindingSetRecord; import org.apache.rya.periodic.notification.api.NodeBin; import org.apache.rya.periodic.notification.api.NotificationProcessor; -import org.apache.rya.periodic.notification.exporter.BindingSetRecord; import org.apache.rya.periodic.notification.exporter.KafkaPeriodicBindingSetExporter; import 
org.apache.rya.periodic.notification.notification.TimestampedNotification; import org.openrdf.query.BindingSet; diff --git a/extras/rya.periodic.service/pom.xml b/extras/rya.periodic.service/pom.xml index fce499656..22ee1aa21 100644 --- a/extras/rya.periodic.service/pom.xml +++ b/extras/rya.periodic.service/pom.xml @@ -34,6 +34,7 @@ under the License. periodic.service.notification periodic.service.integration.tests + periodic.service.api \ No newline at end of file diff --git a/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java b/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java index 8fd95d316..7d0ab794c 100644 --- a/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java +++ b/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java @@ -57,7 +57,10 @@ public class RyaAdminCommands implements CommandMarker { public static final String CREATE_PCJ_CMD = "create-pcj"; + public static final String CREATE_PERIODIC_PCJ_CMD = "create-periodic-pcj"; public static final String DELETE_PCJ_CMD = "delete-pcj"; + public static final String DELETE_PERIODIC_PCJ_CMD = "delete-periodic-pcj"; + public static final String LIST_INCREMENTAL_QUERIES = "list-incremental-queries"; public static final String PRINT_INSTANCE_DETAILS_CMD = "print-instance-details"; public static final String INSTALL_CMD = "install"; public static final String INSTALL_PARAMETERS_CMD = "install-with-parameters"; @@ -129,7 +132,10 @@ public boolean areInstanceCommandsAvailable() { */ @CliAvailabilityIndicator({ CREATE_PCJ_CMD, - DELETE_PCJ_CMD }) + DELETE_PCJ_CMD, + CREATE_PERIODIC_PCJ_CMD, + DELETE_PERIODIC_PCJ_CMD, + LIST_INCREMENTAL_QUERIES}) public boolean arePCJCommandsAvailable() { // The PCJ commands are only available if the Shell is connected to an instance of Rya // that is new enough to use the RyaDetailsRepository and is configured to maintain PCJs. @@ -341,6 +347,79 @@ public String deletePcj( throw new RuntimeException("The PCJ could not be deleted. Provided reason: " + e.getMessage(), e); } } + + @CliCommand(value = CREATE_PERIODIC_PCJ_CMD, help = "Creates and starts the maintenance of a new Periodic PCJ and registers the associated Periodic Notification with Kafka.") + public String createPeriodicPcj( + @CliOption(key = {"topic"}, mandatory = true, help = "Kafka topic for registering new PeriodicNotifications. This topic is monitored by the Periodic Notification Service.") + String topic, + @CliOption(key = {"brokers"}, mandatory = true, help = "Comma delimited list of host/port pairs to establish the initial connection to the Kafka cluster.") + String brokers) { + // Fetch the command that is connected to the store. + final ShellState shellState = state.getShellState(); + final RyaClient commands = shellState.getConnectedCommands().get(); + final String ryaInstance = shellState.getRyaInstanceName().get(); + + try { + // Prompt the user for the SPARQL. + final Optional sparql = sparqlPrompt.getSparql(); + if (sparql.isPresent()) { + // Execute the command. + final String pcjId = commands.getCreatePeriodicPCJ().createPeriodicPCJ(ryaInstance, sparql.get(), topic, brokers); + // Return a message that indicates the ID of the newly created ID. + return String.format("The Periodic PCJ has been created. Its ID is '%s'.", pcjId); + } else { + return ""; // user aborted the SPARQL prompt. 
+ } + } catch (final InstanceDoesNotExistException e) { + throw new RuntimeException(String.format("A Rya instance named '%s' does not exist.", ryaInstance), e); + } catch (final IOException | RyaClientException e) { + throw new RuntimeException("Could not create the Periodic PCJ. Provided reasons: " + e.getMessage(), e); + } + } + + @CliCommand(value = DELETE_PERIODIC_PCJ_CMD, help = "Deletes and halts maintenance of a Periodic PCJ.") + public String deletePeriodicPcj( + @CliOption(key = {"pcjId"}, mandatory = true, help = "The ID of the PCJ that will be deleted.") + final String pcjId, + @CliOption(key = {"topic"}, mandatory = true, help = "Kafka topic for registering a delete notice to remove a PeriodicNotification from the Periodic Notification Service.") + final String topic, + @CliOption(key = {"brokers"}, mandatory = true, help = "Comma delimited list of host/port pairs to establish the initial connection to the Kafka cluster.") + final String brokers + ) { + // Fetch the command that is connected to the store. + final ShellState shellState = state.getShellState(); + final RyaClient commands = shellState.getConnectedCommands().get(); + final String ryaInstance = shellState.getRyaInstanceName().get(); + + try { + // Execute the command. + commands.getDeletePeriodicPCJ().deletePeriodicPCJ(ryaInstance, pcjId, topic, brokers); + return "The Periodic PCJ has been deleted."; + + } catch (final InstanceDoesNotExistException e) { + throw new RuntimeException(String.format("A Rya instance named '%s' does not exist.", ryaInstance), e); + } catch (final RyaClientException e) { + throw new RuntimeException("The Periodic PCJ could not be deleted. Provided reason: " + e.getMessage(), e); + } + } + + + @CliCommand(value = LIST_INCREMENTAL_QUERIES, help = "Lists relevant information about all SPARQL queries maintained by the Fluo application.") + public String listFluoQueries() { + // Fetch the command that is connected to the store. + final ShellState shellState = state.getShellState(); + final RyaClient commands = shellState.getConnectedCommands().get(); + final String ryaInstance = shellState.getRyaInstanceName().get(); + + try { + return commands.getListIncrementalQueries().listIncrementalQueries(ryaInstance); + } catch (final InstanceDoesNotExistException e) { + throw new RuntimeException(String.format("A Rya instance named '%s' does not exist.", ryaInstance), e); + } catch (RyaClientException e) { + throw new RuntimeException("Could not list incremental queries. 
Provided reasons: " + e.getMessage(), e); + } + } + @CliCommand(value = ADD_USER_CMD, help = "Adds an authorized user to the Rya instance.") public void addUser( diff --git a/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java b/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java index cab34e98a..f08e02a7e 100644 --- a/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java +++ b/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java @@ -34,6 +34,7 @@ import org.apache.rya.api.client.AddUser; import org.apache.rya.api.client.CreatePCJ; import org.apache.rya.api.client.DeletePCJ; +import org.apache.rya.api.client.DeletePeriodicPCJ; import org.apache.rya.api.client.GetInstanceDetails; import org.apache.rya.api.client.Install; import org.apache.rya.api.client.Install.DuplicateInstanceNameException; @@ -45,6 +46,7 @@ import org.apache.rya.api.client.RyaClientException; import org.apache.rya.api.client.Uninstall; import org.apache.rya.api.client.CreatePCJ.ExportStrategy; +import org.apache.rya.api.client.CreatePeriodicPCJ; import org.apache.rya.api.client.accumulo.AccumuloConnectionDetails; import org.apache.rya.api.instance.RyaDetails; import org.apache.rya.api.instance.RyaDetails.EntityCentricIndexDetails; @@ -151,6 +153,69 @@ public void deletePCJ() throws InstanceDoesNotExistException, RyaClientException final String expected = "The PCJ has been deleted."; assertEquals(expected, message); } + + @Test + public void createPeriodicPCJ() throws InstanceDoesNotExistException, RyaClientException, IOException { + // Mock the object that performs the create operation. + final String instanceName = "unitTest"; + final String sparql = "SELECT * WHERE { ?person ?noun }"; + final String topic = "topic"; + final String brokers = "brokers"; + final String pcjId = "12341234"; + final CreatePeriodicPCJ mockCreatePCJ = mock(CreatePeriodicPCJ.class); + when(mockCreatePCJ.createPeriodicPCJ( eq(instanceName), eq(sparql), eq(topic), eq(brokers) )).thenReturn( pcjId ); + + final RyaClient mockCommands = mock(RyaClient.class); + when(mockCommands.getCreatePeriodicPCJ()).thenReturn( mockCreatePCJ ); + + final SharedShellState state = new SharedShellState(); + state.connectedToAccumulo(mock(AccumuloConnectionDetails.class), mockCommands); + state.connectedToInstance(instanceName); + + final SparqlPrompt mockSparqlPrompt = mock(SparqlPrompt.class); + when(mockSparqlPrompt.getSparql()).thenReturn(Optional.of(sparql)); + + // Execute the command. + final RyaAdminCommands commands = new RyaAdminCommands(state, mock(InstallPrompt.class), mockSparqlPrompt, mock(UninstallPrompt.class)); + final String message = commands.createPeriodicPcj(topic, brokers); + + // Verify the values that were provided to the command were passed through to CreatePCJ. + verify(mockCreatePCJ).createPeriodicPCJ(eq(instanceName), eq(sparql), eq(topic), eq(brokers)); + + // Verify a message is returned that explains what was created. + final String expected = "The Periodic PCJ has been created. Its ID is '12341234'."; + assertEquals(expected, message); + } + + @Test + public void deletePeriodicPCJ() throws InstanceDoesNotExistException, RyaClientException { + // Mock the object that performs the delete operation. 
+ final DeletePeriodicPCJ mockDeletePCJ = mock(DeletePeriodicPCJ.class); + + final RyaClient mockCommands = mock(RyaClient.class); + when(mockCommands.getDeletePeriodicPCJ()).thenReturn( mockDeletePCJ ); + + final SharedShellState state = new SharedShellState(); + state.connectedToAccumulo(mock(AccumuloConnectionDetails.class), mockCommands); + final String instanceName = "unitTests"; + state.connectedToInstance(instanceName); + + // Execute the command. + final String pcjId = "123412342"; + final String topic = "topic"; + final String brokers = "brokers"; + + final RyaAdminCommands commands = new RyaAdminCommands(state, mock(InstallPrompt.class), mock(SparqlPrompt.class), mock(UninstallPrompt.class)); + final String message = commands.deletePeriodicPcj(pcjId, topic, brokers); + + // Verify the values that were provided to the command were passed through to the DeletePCJ. + verify(mockDeletePCJ).deletePeriodicPCJ(eq(instanceName), eq(pcjId), eq(topic), eq(brokers)); + + // Verify a message is returned that explains what was deleted. + final String expected = "The Periodic PCJ has been deleted."; + assertEquals(expected, message); + } + @Test public void getInstanceDetails() throws InstanceDoesNotExistException, RyaClientException { diff --git a/pom.xml b/pom.xml index 9516b9c40..51822be4d 100644 --- a/pom.xml +++ b/pom.xml @@ -243,6 +243,26 @@ under the License. org.apache.rya rya.pcj.fluo.app ${project.version} + + + org.apache.rya + rya.periodic.service + ${project.version} + + + org.apache.rya + rya.periodic.service.api + ${project.version} + + + org.apache.rya + rya.periodic.service.notification + ${project.version} + + + org.apache.rya + rya.periodic.service.integration.tests + ${project.version} org.apache.thrift
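The shell commands above and the RyaClient operations they call expose the same functionality in two ways. The following sketch is illustrative only and shows the programmatic path; it assumes an already-connected RyaClient, the instance name, topic, and broker list are placeholder values, and the query is adapted from the example in the CreatePeriodicQuery javadoc:

    import org.apache.rya.api.client.RyaClient;

    public class PeriodicPcjExample {

        // Hypothetical usage sketch of the periodic PCJ operations added to RyaClient.
        public static void run(final RyaClient client) throws Exception {
            final String ryaInstance = "rya_";        // placeholder Rya instance name
            final String topic = "notifications";     // placeholder Kafka topic monitored by the Periodic Notification Service
            final String brokers = "localhost:9092";  // placeholder Kafka bootstrap servers
            final String sparql =
                    "prefix function: <http://org.apache.rya/function#> "
                  + "prefix time: <http://www.w3.org/2006/time#> "
                  + "select (count(?obs) as ?total) where { "
                  + "Filter(function:periodic(?time, 1, .25, time:minutes)) "
                  + "?obs <uri:hasTime> ?time. "
                  + "?obs <uri:hasId> ?id }";

            // Create the Periodic PCJ and register its notification with Kafka.
            final String pcjId = client.getCreatePeriodicPCJ().createPeriodicPCJ(ryaInstance, sparql, topic, brokers);

            // List every SPARQL query the Fluo application is incrementally maintaining.
            System.out.println(client.getListIncrementalQueries().listIncrementalQueries(ryaInstance));

            // Delete the Periodic PCJ and tell the notification service to stop polling it.
            client.getDeletePeriodicPCJ().deletePeriodicPCJ(ryaInstance, pcjId, topic, brokers);
        }
    }

From the shell, the equivalent flow is roughly create-periodic-pcj --topic notifications --brokers localhost:9092 (which prompts for the SPARQL), list-incremental-queries, and delete-periodic-pcj --pcjId <id> --topic notifications --brokers localhost:9092.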