From b98b54e8961de342e4731b4421f7777ad429ae1d Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Fri, 4 Dec 2015 13:42:55 -0500
Subject: [PATCH 1/6] A Small Commit to wake up the Infra/github push

---
 temp.txt | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 temp.txt

diff --git a/temp.txt b/temp.txt
new file mode 100644
index 000000000..e69de29bb

From 5a03ef61abe6218997dbbcce870e57d8cef51bae Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Mon, 7 Dec 2015 07:02:38 -0500
Subject: [PATCH 2/6] Cannot delete temp branch, doc'd it.

---
 LICENSE | 202 -
 NOTICE | 6 -
 README.md | 302 +-
 common/pom.xml | 39 -
 common/rya.api/.gitignore | 1 -
 common/rya.api/pom.xml | 76 -
 ...nvalidValueTypeMarkerRuntimeException.java | 55 -
 .../api/RdfCloudTripleStoreConfiguration.java | 507 --
 .../rya/api/RdfCloudTripleStoreConstants.java | 151 -
 .../rya/api/RdfCloudTripleStoreStatement.java | 72 -
 .../mvm/rya/api/RdfCloudTripleStoreUtils.java | 420 --
 .../api/date/DateTimeTtlValueConverter.java | 80 -
 .../date/TimestampTtlStrValueConverter.java | 56 -
 .../api/date/TimestampTtlValueConverter.java | 56 -
 .../mvm/rya/api/date/TtlValueConverter.java | 41 -
 .../main/java/mvm/rya/api/domain/Node.java | 38 -
 .../java/mvm/rya/api/domain/RangeURI.java | 52 -
 .../java/mvm/rya/api/domain/RangeValue.java | 72 -
 .../java/mvm/rya/api/domain/RyaRange.java | 32 -
 .../java/mvm/rya/api/domain/RyaSchema.java | 43 -
 .../java/mvm/rya/api/domain/RyaStatement.java | 252 -
 .../main/java/mvm/rya/api/domain/RyaType.java | 111 -
 .../mvm/rya/api/domain/RyaTypePrefix.java | 59 -
 .../java/mvm/rya/api/domain/RyaTypeRange.java | 99 -
 .../main/java/mvm/rya/api/domain/RyaURI.java | 63 -
 .../java/mvm/rya/api/domain/RyaURIPrefix.java | 47 -
 .../java/mvm/rya/api/domain/RyaURIRange.java | 95 -
 .../domain/utils/RyaStatementWritable.java | 140 -
 .../rya/api/layout/TableLayoutStrategy.java | 40 -
 .../api/layout/TablePrefixLayoutStrategy.java | 85 -
 .../mvm/rya/api/persist/RdfDAOException.java | 44 -
 .../mvm/rya/api/persist/RdfEvalStatsDAO.java | 54 -
 .../mvm/rya/api/persist/RyaConfigured.java | 35 -
 .../main/java/mvm/rya/api/persist/RyaDAO.java | 126 -
 .../mvm/rya/api/persist/RyaDAOException.java | 43 -
 .../rya/api/persist/RyaNamespaceManager.java | 41 -
 .../persist/index/RyaSecondaryIndexer.java | 45 -
 .../joinselect/SelectivityEvalDAO.java | 37 -
 .../rya/api/persist/query/BatchRyaQuery.java | 115 -
 .../mvm/rya/api/persist/query/RyaQuery.java | 97 -
 .../rya/api/persist/query/RyaQueryEngine.java | 96 -
 .../api/persist/query/RyaQueryOptions.java | 246 -
 .../rya/api/persist/query/join/HashJoin.java | 158 -
 .../api/persist/query/join/IterativeJoin.java | 233 -
 .../mvm/rya/api/persist/query/join/Join.java | 44 -
 .../rya/api/persist/query/join/MergeJoin.java | 244 -
 .../rya/api/persist/utils/RyaDAOHelper.java | 176 -
 .../AbstractTriplePatternStrategy.java | 93 -
 .../mvm/rya/api/query/strategy/ByteRange.java | 45 -
 .../query/strategy/TriplePatternStrategy.java | 49 -
 ...HashedPoWholeRowTriplePatternStrategy.java | 135 -
 ...ashedSpoWholeRowTriplePatternStrategy.java | 138 -
 .../OspWholeRowTriplePatternStrategy.java | 113 -
 .../PoWholeRowTriplePatternStrategy.java | 128 -
 .../SpoWholeRowTriplePatternStrategy.java | 130 -
 .../CustomRyaTypeResolverMapping.java | 57 -
 .../rya/api/resolver/RdfToRyaConversions.java | 93 -
 .../java/mvm/rya/api/resolver/RyaContext.java | 192 -
 .../rya/api/resolver/RyaToRdfConversions.java | 75 -
 .../rya/api/resolver/RyaTripleContext.java | 123 -
 .../mvm/rya/api/resolver/RyaTypeResolver.java | 60 -
 .../resolver/RyaTypeResolverException.java | 43 -
 .../api/resolver/RyaTypeResolverMapping.java | 57 -
 .../resolver/impl/BooleanRyaTypeResolver.java | 61 -
 .../resolver/impl/ByteRyaTypeResolver.java | 63 -
 .../resolver/impl/CustomDatatypeResolver.java | 70 -
 .../impl/DateTimeRyaTypeResolver.java | 76 -
 .../resolver/impl/DoubleRyaTypeResolver.java | 68 -
 .../resolver/impl/FloatRyaTypeResolver.java | 64 -
 .../resolver/impl/IntegerRyaTypeResolver.java | 67 -
 .../resolver/impl/LongRyaTypeResolver.java | 68 -
 .../resolver/impl/RyaTypeResolverImpl.java | 124 -
 .../rya/api/resolver/impl/RyaURIResolver.java | 44 -
 .../ServiceBackedRyaTypeResolverMappings.java | 45 -
 .../resolver/impl/ShortRyaTypeResolver.java | 65 -
 .../rya/api/resolver/triple/TripleRow.java | 107 -
 .../api/resolver/triple/TripleRowRegex.java | 84 -
 .../resolver/triple/TripleRowResolver.java | 43 -
 .../triple/TripleRowResolverException.java | 43 -
 .../impl/WholeRowHashedTripleResolver.java | 154 -
 .../triple/impl/WholeRowTripleResolver.java | 139 -
 .../rya/api/security/SecurityProvider.java | 28 -
 .../api/utils/CloseableIterableIteration.java | 76 -
 .../mvm/rya/api/utils/EnumerationWrapper.java | 58 -
 .../mvm/rya/api/utils/IteratorWrapper.java | 58 -
 .../rya/api/utils/NullableStatementImpl.java | 105 -
 .../api/utils/PeekingCloseableIteration.java | 74 -
 .../RyaStatementAddBindingSetFunction.java | 40 -
 ...entRemoveBindingSetCloseableIteration.java | 61 -
 .../mvm/rya/api/domain/RyaURIPrefixTest.java | 37 -
 .../rya/api/persist/query/RyaQueryTest.java | 63 -
 .../AbstractTriplePatternStrategyTest.java | 192 -
 ...edPoWholeRowTriplePatternStrategyTest.java | 175 -
 ...dSpoWholeRowTriplePatternStrategyTest.java | 199 -
 .../wholerow/MockRdfCloudConfiguration.java | 32 -
 .../OspWholeRowTriplePatternStrategyTest.java | 135 -
 .../PoWholeRowTriplePatternStrategyTest.java | 159 -
 .../SpoWholeRowTriplePatternStrategyTest.java | 185 -
 .../mvm/rya/api/resolver/RyaContextTest.java | 86 -
 .../impl/CustomDatatypeResolverTest.java | 40 -
 .../impl/DateTimeRyaTypeResolverTest.java | 177 -
 .../impl/DoubleRyaTypeResolverTest.java | 46 -
 .../impl/IntegerRyaTypeResolverTest.java | 44 -
 .../impl/LongRyaTypeResolverTest.java | 49 -
 .../api/resolver/impl/RyaURIResolverTest.java | 40 -
 .../HashedWholeRowTripleResolverTest.java | 124 -
 .../impl/WholeRowTripleResolverTest.java | 127 -
 .../java/mvm/rya/api/utils/RdfIOTest.java | 67 -
 common/rya.provenance/pom.xml | 53 -
 .../LoggingProvenanceCollector.java | 41 -
 .../ProvenanceCollectionException.java | 39 -
 .../provenance/ProvenanceCollector.java | 34 -
 .../TriplestoreProvenanceCollector.java | 68 -
 .../provenance/rdf/BaseProvenanceModel.java | 68 -
 .../provenance/rdf/RDFProvenanceModel.java | 33 -
 .../TriplestoreProvenanceCollectorTest.java | 51 -
 .../rdf/BaseProvenanceModelTest.java | 38 -
 dao/accumulo.rya/pom.xml | 93 -
 .../AccumuloNamespaceTableIterator.java | 99 -
 .../accumulo/AccumuloRdfConfiguration.java | 86 -
 .../rya/accumulo/AccumuloRdfConstants.java | 40 -
 .../rya/accumulo/AccumuloRdfEvalStatsDAO.java | 173 -
 .../accumulo/AccumuloRdfQueryIterator.java | 297 --
 .../mvm/rya/accumulo/AccumuloRdfUtils.java | 72 -
 .../java/mvm/rya/accumulo/AccumuloRyaDAO.java | 523 --
 .../DefineTripleQueryRangeFactory.java | 152 -
 .../mvm/rya/accumulo/RyaTableKeyValues.java | 115 -
 .../accumulo/RyaTableMutationsFactory.java | 102 -
 .../experimental/AbstractAccumuloIndexer.java | 59 -
 .../experimental/AccumuloIndexer.java | 33 -
 .../accumulo/mr/AbstractAccumuloMRTool.java | 164 -
 .../mr/eval/AccumuloRdfCountTool.java | 258 -
 .../mr/fileinput/BulkNtripsInputTool.java | 369 --
 .../mr/fileinput/RdfFileInputByLineTool.java | 251 -
 .../mr/fileinput/RdfFileInputFormat.java | 146 -
 .../mr/fileinput/RdfFileInputTool.java | 175 -
 .../accumulo/mr/upgrade/Upgrade322Tool.java | 240 -
 .../mr/utils/AccumuloHDFSFileInputFormat.java | 206 -
 .../rya/accumulo/mr/utils/AccumuloProps.java | 58 -
 .../mvm/rya/accumulo/mr/utils/MRUtils.java | 119 -
 .../query/AccumuloRyaQueryEngine.java | 402 --
 .../query/KeyValueToRyaStatementFunction.java | 72 -
 .../query/RangeBindingSetEntries.java | 58 -
 ...yaStatementBindingSetKeyValueIterator.java | 154 -
 .../query/RyaStatementKeyValueIterator.java | 107 -
 .../query/ScannerBaseCloseableIterable.java | 56 -
 .../rya/accumulo/utils/TimeRangeFilter.java | 87 -
 .../AccumuloRdfConfigurationTest.java | 59 -
 .../mvm/rya/accumulo/AccumuloRyaDAOTest.java | 665 ---
 .../DefineTripleQueryRangeFactoryTest.java | 265 -
 .../mr/eval/AccumuloRdfCountToolTest.java | 282 --
 .../mr/fileinput/RdfFileInputToolTest.java | 146 -
 .../mr/upgrade/Upgrade322ToolTest.java | 319 --
 .../UpgradeObjectSerializationTest.java | 119 -
 .../src/test/resources/namedgraphs.trig | 7 -
 .../src/test/resources/test.ntriples | 1 -
 dao/mongodb.rya/pom.xml | 48 -
 .../mvm/rya/mongodb/MongoDBQueryEngine.java | 207 -
 .../rya/mongodb/MongoDBRdfConfiguration.java | 121 -
 .../java/mvm/rya/mongodb/MongoDBRyaDAO.java | 222 -
 .../mongodb/dao/MongoDBNamespaceManager.java | 35 -
 .../mongodb/dao/MongoDBStorageStrategy.java | 41 -
 .../dao/SimpleMongoDBNamespaceManager.java | 181 -
 .../dao/SimpleMongoDBStorageStrategy.java | 152 -
 ...onCloseableRyaStatementCursorIterator.java | 57 -
 .../RyaStatementBindingSetCursorIterator.java | 108 -
 .../iter/RyaStatementCursorIterable.java | 67 -
 .../iter/RyaStatementCursorIterator.java | 104 -
 dao/pom.xml | 39 -
 extras/indexing/pom.xml | 128 -
 .../documentIndex/DocIndexIteratorUtil.java | 31 -
 .../DocumentIndexIntersectingIterator.java | 850 ----
 .../accumulo/documentIndex/TextColumn.java | 108 -
 .../rya/accumulo/mr/NullFreeTextIndexer.java | 70 -
 .../mvm/rya/accumulo/mr/NullGeoIndexer.java | 121 -
 .../rya/accumulo/mr/NullTemporalIndexer.java | 154 -
 .../mvm/rya/accumulo/mr/RyaOutputFormat.java | 329 --
 .../rya/accumulo/mr/StatementWritable.java | 86 -
 .../BulkNtripsInputToolIndexing.java | 227 -
 .../mr/fileinput/RyaBatchWriterInputTool.java | 243 -
 .../AccumuloPrecompQueryIndexer.java | 326 --
 .../java/mvm/rya/indexing/DocIdIndexer.java | 47 -
 .../rya/indexing/FilterFunctionOptimizer.java | 358 --
 .../mvm/rya/indexing/FreeTextIndexer.java | 62 -
 .../java/mvm/rya/indexing/GeoIndexer.java | 201 -
 .../ExternalIndexMatcher.java | 34 -
 .../GeneralizedExternalProcessor.java | 730 ---
 .../IndexPlanValidator/IndexListPruner.java | 35 -
 .../IndexPlanValidator.java | 210 -
 .../IndexTupleGenerator.java | 33 -
 .../IndexedExecutionPlanGenerator.java | 207 -
 .../IndexedQueryPlanSelector.java | 32 -
 .../ThreshholdPlanSelector.java | 240 -
 .../TupleExecutionPlanGenerator.java | 215 -
 .../IndexPlanValidator/TupleReArranger.java | 348 --
 .../IndexPlanValidator/TupleValidator.java | 34 -
 .../ValidIndexCombinationGenerator.java | 671 ---
 .../VarConstantIndexListPruner.java | 171 -
 .../java/mvm/rya/indexing/IndexingExpr.java | 94 -
 .../indexing/IndexingFunctionRegistry.java | 136 -
 .../mvm/rya/indexing/IteratorFactory.java | 159 -
 .../main/java/mvm/rya/indexing/KeyParts.java | 331 --
 .../mvm/rya/indexing/PrecompQueryIndexer.java | 63 -
 .../java/mvm/rya/indexing/RyaSailFactory.java | 84 -
 .../java/mvm/rya/indexing/SearchFunction.java | 45 -
 .../rya/indexing/SearchFunctionFactory.java | 71 -
 .../mvm/rya/indexing/StatementContraints.java | 73 -
 .../mvm/rya/indexing/TemporalIndexer.java | 183 -
 .../mvm/rya/indexing/TemporalInstant.java | 83 -
 .../mvm/rya/indexing/TemporalInterval.java | 181 -
 .../rya/indexing/accumulo/ConfigUtils.java | 424 --
 .../mvm/rya/indexing/accumulo/Md5Hash.java | 45 -
 .../accumulo/StatementSerializer.java | 227 -
 .../accumulo/entity/AccumuloDocIdIndexer.java | 450 --
 .../accumulo/entity/EntityCentricIndex.java | 252 -
 .../entity/EntityLocalityGroupSetter.java | 171 -
 .../accumulo/entity/EntityOptimizer.java | 436 --
 .../accumulo/entity/EntityTupleSet.java | 264 -
 .../indexing/accumulo/entity/StarQuery.java | 636 ---
 .../freetext/AccumuloFreeTextIndexer.java | 611 ---
 .../accumulo/freetext/ColumnPrefixes.java | 120 -
 .../accumulo/freetext/FreeTextTupleSet.java | 160 -
 .../accumulo/freetext/LuceneTokenizer.java | 57 -
 .../accumulo/freetext/SimpleTokenizer.java | 43 -
 .../indexing/accumulo/freetext/Tokenizer.java | 31 -
 .../freetext/iterators/AndingIterator.java | 563 ---
 .../iterators/BooleanTreeIterator.java | 322 --
 .../freetext/query/ASTExpression.java | 63 -
 .../accumulo/freetext/query/ASTNodeUtils.java | 210 -
 .../freetext/query/ASTSimpleNode.java | 917 ----
 .../accumulo/freetext/query/ASTTerm.java | 79 -
 .../freetext/query/JJTQueryParserState.java | 1024 ----
 .../accumulo/freetext/query/Node.java | 937 ----
 .../freetext/query/ParseException.java | 1088 -----
 .../accumulo/freetext/query/QueryParser.java | 1293 -----
 .../accumulo/freetext/query/QueryParser.jj | 176 -
 .../accumulo/freetext/query/QueryParser.jjt | 90 -
 .../freetext/query/QueryParserConstants.java | 960 ----
 .../query/QueryParserTokenManager.java | 1389 ------
 .../query/QueryParserTreeConstants.java | 920 ----
 .../freetext/query/SimpleCharStream.java | 1372 ------
 .../accumulo/freetext/query/SimpleNode.java | 980 ----
 .../accumulo/freetext/query/Token.java | 1032 ----
 .../freetext/query/TokenMgrError.java | 1048 ----
 .../indexing/accumulo/geo/GeoConstants.java | 45 -
 .../accumulo/geo/GeoMesaGeoIndexer.java | 447 --
 .../indexing/accumulo/geo/GeoParseUtils.java | 46 -
 .../indexing/accumulo/geo/GeoTupleSet.java | 364 --
 .../temporal/AccumuloTemporalIndexer.java | 824 ----
 .../temporal/TemporalInstantRfc3339.java | 218 -
 .../accumulo/temporal/TemporalTupleSet.java | 320 --
 .../indexing/external/ExternalIndexMain.java | 219 -
 .../indexing/external/ExternalProcessor.java | 726 ---
 .../rya/indexing/external/ExternalSail.java | 86 -
 .../external/ExternalSailExample.java | 124 -
 .../external/PrecompJoinOptimizer.java | 773 ---
 .../external/QueryVariableNormalizer.java | 1180 -----
 .../external/tupleSet/AccumuloIndexSet.java | 626 ---
 .../external/tupleSet/ExternalTupleSet.java | 213 -
 .../tupleSet/SimpleExternalTupleSet.java | 88 -
 .../mongodb/AbstractMongoIndexer.java | 73 -
 .../mongodb/GeoMongoDBStorageStrategy.java | 185 -
 .../rya/indexing/mongodb/MongoGeoIndexer.java | 259 -
 .../indexing/mongodb/MongoGeoTupleSet.java | 361 --
 .../ValidIndexCombinationGeneratorTest.java | 507 --
 ...DocumentIndexIntersectingIteratorTest.java | 1903 --------
 .../GeneralizedExternalProcessorTest.java | 325 --
 .../IndexPlanValidatorTest.java | 1148 -----
 .../IndexedExecutionPlanGeneratorTest.java | 423 --
 .../ThreshholdPlanSelectorTest.java | 838 ----
 .../TupleExecutionPlanGeneratorTest.java | 364 --
 .../TupleReArrangerTest.java | 141 -
 .../ValidIndexCombinationGeneratorTest.java | 620 ---
 .../VarConstantIndexListPrunerTest.java | 329 --
 .../accumulo/StatementSerializerTest.java | 106 -
 .../entity/AccumuloDocIndexerTest.java | 2125 --------
 .../accumulo/entity/EntityOptimizerTest.java | 1357 ------
 .../accumulo/entity/StarQueryTest.java | 290 --
 .../freetext/AccumuloFreeTextIndexerTest.java | 221 -
 .../freetext/query/QueryParserTest.java | 130 -
 .../accumulo/geo/GeoIndexerSfTest.java | 316 --
 .../indexing/accumulo/geo/GeoIndexerTest.java | 370 --
 .../temporal/AccumuloTemporalIndexerTest.java | 1040 ----
 .../temporal/TemporalInstantTest.java | 96 -
 .../temporal/TemporalIntervalTest.java | 178 -
 .../AccumuloConstantIndexSetTest.java | 831 ----
 .../external/AccumuloIndexSetTest.java | 4330 -----
 .../external/AccumuloIndexSetTest2.java | 803 ---
 .../PrecompJoinOptimizerIntegrationTest.java | 550 ---
 .../external/PrecompJoinOptimizerTest.java | 521 --
 .../tupleSet/ExternalProcessorTest.java | 1654 -------
 .../tupleSet/QueryVariableNormalizerTest.java | 965 ----
 .../VarConstExternalProcessorTest.java | 490 --
 .../VarConstQueryVariableNormalizerTest.java | 747 ---
 extras/indexingExample/pom.xml | 99 -
 .../src/main/assembly/assembly.xml | 70 -
 .../src/main/java/EntityDirectExample.java | 311 --
 .../src/main/java/MongoRyaDirectExample.java | 307 --
 .../src/main/java/RyaDirectExample.java | 700 ---
 .../src/main/scripts/RunRyaDirectExample.bat | 41 -
 extras/pom.xml | 43 -
 extras/rya.console/.gitignore | 8 -
 extras/rya.console/pom.xml | 100 -
 .../mvm/rya/console/RyaBannerProvider.java | 69 -
 .../mvm/rya/console/RyaConsoleCommands.java | 230 -
 .../console/RyaHistoryFileNameProvider.java | 47 -
 .../mvm/rya/console/RyaPromptProvider.java | 47 -
 .../META-INF/spring/spring-shell-plugin.xml | 30 -
 extras/rya.manual/pom.xml | 53 -
 extras/rya.manual/src/site/markdown/_index.md | 44 -
 extras/rya.manual/src/site/markdown/alx.md | 82 -
 .../src/site/markdown/build-source.md | 36 -
 extras/rya.manual/src/site/markdown/eval.md | 79 -
 extras/rya.manual/src/site/markdown/index.md | 45 -
 extras/rya.manual/src/site/markdown/infer.md | 35 -
 .../src/site/markdown/loadPrecomputedJoin.md | 49 -
 .../rya.manual/src/site/markdown/loaddata.md | 142 -
 .../rya.manual/src/site/markdown/overview.md | 26 -
 .../rya.manual/src/site/markdown/querydata.md | 137 -
 .../src/site/markdown/quickstart.md | 62 -
 .../src/site/markdown/sm-addauth.md | 119 -
 .../src/site/markdown/sm-firststeps.md | 80 -
 .../rya.manual/src/site/markdown/sm-infer.md | 353 --
 .../src/site/markdown/sm-namedgraph.md | 157 -
 .../src/site/markdown/sm-simpleaqr.md | 75 -
 .../src/site/markdown/sm-sparqlquery.md | 79 -
 .../src/site/markdown/sm-updatedata.md | 83 -
 .../src/site/resources/js/fixmarkdownlinks.js | 25 -
 extras/rya.manual/src/site/site.xml | 65 -
 extras/rya.prospector/pom.xml | 109 -
 .../rya/prospector/domain/IndexEntry.groovy | 76 -
 .../domain/IntermediateProspect.groovy | 70 -
 .../prospector/domain/TripleValueType.java | 26 -
 .../mvm/rya/prospector/mr/Prospector.groovy | 108 -
 .../prospector/mr/ProspectorCombiner.groovy | 61 -
 .../rya/prospector/mr/ProspectorMapper.groovy | 75 -
 .../prospector/mr/ProspectorReducer.groovy | 57 -
 .../rya/prospector/plans/IndexWorkPlan.groovy | 51 -
 .../plans/IndexWorkPlanManager.groovy | 29 -
 .../prospector/plans/impl/CountPlan.groovy | 220 -
 .../ServicesBackedIndexWorkPlanManager.groovy | 38 -
 .../service/ProspectorService.groovy | 126 -
 .../ProspectorServiceEvalStatsDAO.groovy | 122 -
 .../rya/prospector/utils/CustomEntry.groovy | 52 -
 .../utils/ProspectorConstants.groovy | 41 -
 .../prospector/utils/ProspectorUtils.groovy | 138 -
 .../AccumuloSelectivityEvalDAO.java | 640 ---
 .../rya/joinselect/CardinalityCalcUtil.java | 267 -
 .../mvm/rya/joinselect/mr/FullTableSize.java | 129 -
 .../joinselect/mr/JoinSelectAggregate.java | 272 --
 .../rya/joinselect/mr/JoinSelectDriver.java | 60 -
 .../mr/JoinSelectProspectOutput.java | 124 -
 .../mr/JoinSelectSpoTableOutput.java | 126 -
 .../mr/JoinSelectStatisticsSum.java | 220 -
 .../mvm/rya/joinselect/mr/utils/CardList.java | 209 -
 .../joinselect/mr/utils/CardinalityType.java | 149 -
 .../joinselect/mr/utils/CompositeType.java | 122 -
 .../mr/utils/JoinSelectConstants.java | 46 -
 .../mr/utils/JoinSelectStatsUtil.java | 183 -
 .../rya/joinselect/mr/utils/TripleCard.java | 145 -
 .../rya/joinselect/mr/utils/TripleEntry.java | 180 -
 .../mvm.rya.prospector.plans.IndexWorkPlan | 1 -
 .../rya/prospector/mr/ProspectorTest.groovy | 178 -
 .../ProspectorServiceEvalStatsDAOTest.groovy | 182 -
 .../AccumuloSelectivityEvalDAOTest.java | 592 ---
 .../mr/CardinalityIdentityReducerTest.java | 141 -
 .../joinselect/mr/CardinalityMapperTest.java | 76 -
 .../rya/joinselect/mr/FullTableSizeTest.java | 64 -
 .../rya/joinselect/mr/JoinReducerTest.java | 124 -
 .../joinselect/mr/JoinSelectMapperTest.java | 94 -
 .../mr/JoinSelectProspectOutputTest.java | 89 -
 .../mr/JoinSelectStatisticsSumTest.java | 60 -
 .../mr/JoinSelectStatisticsTest.java | 872 ----
 .../test/resources/stats_cluster_config.xml | 97 -
 extras/tinkerpop.rya/pom.xml | 104 -
 .../config/RyaGraphConfiguration.groovy | 103 -
 .../rya/blueprints/sail/RyaSailEdge.groovy | 94 -
 .../sail/RyaSailEdgeSequence.groovy | 110 -
 .../rya/blueprints/sail/RyaSailGraph.groovy | 131 -
 .../rya/blueprints/sail/RyaSailVertex.groovy | 89 -
 .../sail/RyaSailVertexSequence.groovy | 94 -
 .../mvm/rya/blueprints/sail/RyaSailEdge.java | 101 -
 .../rya/blueprints/sail/RyaSailVertex.java | 105 -
 .../mvm/rya/blueprints/TstGremlinRya.groovy | 111 -
 .../config/RyaGraphConfigurationTest.groovy | 129 -
 .../sail/RyaSailVertexSequenceTest.groovy | 100 -
 .../src/test/resources/log4j.properties | 19 -
 osgi/alx.rya.console/pom.xml | 61 -
 .../rya/alx/command/AbstractRyaCommand.java | 58 -
 .../alx/command/GetStatementsRyaCommand.java | 80 -
 .../mvm/rya/alx/command/InfoRyaCommand.java | 46 -
 .../blueprint/alx.rya.console-blueprint.xml | 34 -
 osgi/alx.rya/pom.xml | 86 -
 .../src/main/features/alx.rya-features.xml | 104 -
 .../rya/alx/util/ConfigurationFactory.java | 53 -
 .../META-INF/spring/alx.rya-spring-osgi.xml | 53 -
 .../META-INF/spring/alx.rya-spring.xml | 70 -
 .../src/main/resources/ROOT/crossdomain.xml | 25 -
 osgi/camel.rya/pom.xml | 70 -
 .../mvm/rya/camel/cbsail/CbSailComponent.java | 59 -
 .../mvm/rya/camel/cbsail/CbSailEndpoint.java | 119 -
 .../mvm/rya/camel/cbsail/CbSailProducer.java | 175 -
 .../org/apache/camel/component/cbsail | 1 -
 .../camel/cbsail/CbSailIntegrationTest.java | 117 -
 .../mvm/rya/camel/cbsail/CbSailPojoMain.java | 45 -
 .../java/mvm/rya/camel/cbsail/CbSailTest.java | 205 -
 osgi/pom.xml | 91 -
 .../openrdf-sesame-osgi.bnd | 7 -
 osgi/sesame-runtime-osgi/pom.xml | 139 -
 pig/accumulo.pig/pom.xml | 84 -
 .../mvm/rya/accumulo/pig/AccumuloStorage.java | 383 --
 .../rya/accumulo/pig/IndexWritingTool.java | 348 --
 .../accumulo/pig/SparqlQueryPigEngine.java | 268 -
 .../pig/SparqlToPigTransformVisitor.java | 345 --
 .../accumulo/pig/StatementPatternStorage.java | 304 --
 .../optimizer/SimilarVarJoinOptimizer.java | 210 -
 .../rya/accumulo/pig/AccumuloStorageTest.java | 284 --
 .../accumulo/pig/IndexWritingToolTest.java | 326 --
 .../pig/SparqlQueryPigEngineTest.java | 76 -
 .../pig/SparqlToPigTransformVisitorTest.java | 402 --
 .../pig/StatementPatternStorageTest.java | 185 -
 .../src/test/resources/ResultsFile1.txt | 8 -
 .../src/test/resources/testQuery.txt | 7 -
 .../src/test/resources/testQuery2.txt | 4 -
 pig/pom.xml | 38 -
 sail/pom.xml | 98 -
 .../rdftriplestore/RdfCloudTripleStore.java | 179 -
 .../RdfCloudTripleStoreConnection.java | 623 ---
 .../RdfCloudTripleStoreFactory.java | 56 -
 .../RdfCloudTripleStoreSailConfig.java | 133 -
 .../rya/rdftriplestore/RyaSailRepository.java | 53 -
 .../RyaSailRepositoryConnection.java | 109 -
 .../evaluation/ExternalBatchingIterator.java | 33 -
 .../ExternalMultipleBindingSetsIterator.java | 109 -
 .../evaluation/FilterRangeVisitor.java | 97 -
 .../MultipleBindingSetsIterator.java | 108 -
 .../ParallelEvaluationStrategyImpl.java | 281 --
 .../evaluation/ParallelJoinIterator.java | 139 -
 .../evaluation/PushJoinDownVisitor.java | 57 -
 .../evaluation/QueryJoinOptimizer.java | 284 --
 .../evaluation/QueryJoinSelectOptimizer.java | 260 -
 ...fCloudTripleStoreEvaluationStatistics.java | 281 --
 ...eStoreSelectivityEvaluationStatistics.java | 128 -
 .../evaluation/ReorderJoinVisitor.java | 70 -
 .../SeparateFilterJoinsVisitor.java | 55 -
 .../inference/AbstractInferVisitor.java | 108 -
 .../inference/DoNotExpandSP.java | 51 -
 .../inference/InferConstants.java | 34 -
 .../rdftriplestore/inference/InferJoin.java | 50 -
 .../rdftriplestore/inference/InferUnion.java | 48 -
 .../inference/InferenceEngine.java | 410 --
 .../inference/InferenceEngineException.java | 43 -
 .../inference/InverseOfVisitor.java | 80 -
 .../inference/SameAsVisitor.java | 187 -
 .../inference/SubClassOfVisitor.java | 108 -
 .../inference/SubPropertyOfVisitor.java | 121 -
 .../inference/SymmetricPropertyVisitor.java | 78 -
 .../inference/TransitivePropertyVisitor.java | 69 -
 .../namespace/NamespaceManager.java | 167 -
 .../utils/CombineContextsRdfInserter.java | 165 -
 .../utils/DefaultStatistics.java | 58 -
 .../utils/FixedStatementPattern.java | 59 -
 .../utils/TransitivePropertySP.java | 52 -
 .../META-INF/org.openrdf.store.schemas | 1 -
 .../schemas/cloudbasestore-schema.ttl | 20 -
 .../org.openrdf.sail.config.SailFactory | 1 -
 sail/src/main/resources/ehcache.xml | 46 -
 .../mvm/rya/ArbitraryLengthQueryTest.java | 500 --
 sail/src/test/java/mvm/rya/HashJoinTest.java | 374 --
 .../test/java/mvm/rya/IterativeJoinTest.java | 365 --
 sail/src/test/java/mvm/rya/MergeJoinTest.java | 370 --
 .../RdfCloudTripleStoreConnectionTest.java | 1363 ------
 .../java/mvm/rya/RdfCloudTripleStoreTest.java | 699 ---
 .../mvm/rya/RdfCloudTripleStoreUtilsTest.java | 86 -
 .../QueryJoinSelectOptimizerTest.java | 992 ----
 ...reSelectivityEvaluationStatisticsTest.java | 304 --
 .../rya/triplestore/inference/SameAsTest.java | 115 -
 sail/src/test/resources/cdrdf.xml | 41 -
 sail/src/test/resources/namedgraphs.trig | 37 -
 sail/src/test/resources/ntriples.nt | 1 -
 sail/src/test/resources/reification.xml | 36 -
 sail/src/test/resources/univ-bench.owl | 466 --
 src/license/apacheV2Header.ftl | 16 -
 temp.txt | 0
 web/pom.xml | 38 -
 web/web.rya/pom.xml | 136 -
 web/web.rya/resources/environment.properties | 27 -
 .../cloudbase/sail/AbstractRDFWebServlet.java | 111 -
 .../web/cloudbase/sail/DeleteDataServlet.java | 66 -
 .../web/cloudbase/sail/LoadDataServlet.java | 76 -
 .../web/cloudbase/sail/QueryDataServlet.java | 185 -
 .../cloudbase/sail/QuerySerqlDataServlet.java | 136 -
 .../web/cloudbase/sail/RDFWebConstants.java | 35 -
 .../mvm/cloud/rdf/web/sail/RdfController.java | 344 --
 .../mvm/cloud/rdf/web/sail/ResultFormat.java | 27 -
 .../rdf/web/sail/SecurityProviderImpl.java | 34 -
 .../webapp/WEB-INF/spring/spring-accumulo.xml | 51 -
 .../WEB-INF/spring/spring-cloudbase.xml | 50 -
 .../webapp/WEB-INF/spring/spring-mongodb.xml | 43 -
 .../WEB-INF/spring/spring-root-extensions.xml | 105 -
 .../webapp/WEB-INF/spring/spring-root.xml | 68 -
 .../webapp/WEB-INF/spring/spring-security.xml | 34 -
 web/web.rya/src/main/webapp/WEB-INF/web.xml | 166 -
 web/web.rya/src/main/webapp/crossdomain.xml | 25 -
 web/web.rya/src/main/webapp/sparqlQuery.jsp | 79 -
 .../cloudbase/sail/DeleteDataServletRun.java | 476 --
 .../cloudbase/sail/LoadDataServletRun.java | 66 -
 .../cloudbase/sail/QueryDataServletRun.java | 467 --
 .../sail/RdfControllerIntegrationTest.java | 86 -
 .../cloud/rdf/web/sail/RdfControllerTest.java | 160 -
 web/web.rya/src/test/resources/cdrdf.xml | 41 -
 .../controllerIntegrationTest-accumulo.xml | 50 -
 .../controllerIntegrationTest-root.xml | 62 -
 .../test/resources/controllerTest-context.xml | 40 -
 .../test/resources/dummyData/memorystore.data | Bin 290 -> 0 bytes
 .../src/test/resources/namedgraphs.trig | 37 -
 web/web.rya/src/test/resources/test.nt | 3 -
 517 files changed, 4 insertions(+), 107267 deletions(-)
 delete mode 100644 LICENSE
 delete mode 100644 NOTICE
 delete mode 100644 common/pom.xml
 delete mode 100644 common/rya.api/.gitignore
 delete mode 100644 common/rya.api/pom.xml
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
 delete mode 100644 common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java
 delete mode 100644 common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java
 delete mode 100644 common/rya.provenance/pom.xml
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
 delete mode 100644 common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
 delete mode 100644 common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
 delete mode 100644 common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
 delete mode 100644 dao/accumulo.rya/pom.xml
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
 delete mode 100644 dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java
 delete mode 100644 dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java
 delete mode 100644 dao/accumulo.rya/src/test/resources/namedgraphs.trig
 delete mode 100644 dao/accumulo.rya/src/test/resources/test.ntriples
 delete mode 100644 dao/mongodb.rya/pom.xml
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java
 delete mode 100644 dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java
 delete mode 100644 dao/pom.xml
 delete mode 100644 extras/indexing/pom.xml
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jj
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
 delete mode 100644 extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java
 delete mode 100644 extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
 delete mode 100644 extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
 delete mode 100644 extras/indexingExample/pom.xml
 delete mode 100644 extras/indexingExample/src/main/assembly/assembly.xml
 delete mode 100644 extras/indexingExample/src/main/java/EntityDirectExample.java
 delete mode 100644 extras/indexingExample/src/main/java/MongoRyaDirectExample.java
 delete mode 100644 extras/indexingExample/src/main/java/RyaDirectExample.java
 delete mode 100644 extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
 delete mode 100644 extras/pom.xml
 delete mode 100644 extras/rya.console/.gitignore
 delete mode 100644 extras/rya.console/pom.xml
 delete mode 100644 extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
 delete mode 100644 extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
 delete mode 100644 extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
 delete mode 100644 extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
 delete mode 100644 extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
 delete mode 100644 extras/rya.manual/pom.xml
 delete mode 100644 extras/rya.manual/src/site/markdown/_index.md
 delete mode 100644 extras/rya.manual/src/site/markdown/alx.md
 delete mode 100644 extras/rya.manual/src/site/markdown/build-source.md
 delete mode 100644 extras/rya.manual/src/site/markdown/eval.md
 delete mode 100644 extras/rya.manual/src/site/markdown/index.md
 delete mode 100644 extras/rya.manual/src/site/markdown/infer.md
 delete mode 100644 extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
 delete mode 100644 extras/rya.manual/src/site/markdown/loaddata.md
 delete mode 100644 extras/rya.manual/src/site/markdown/overview.md
 delete mode 100644 extras/rya.manual/src/site/markdown/querydata.md
 delete mode 100644 extras/rya.manual/src/site/markdown/quickstart.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-addauth.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-firststeps.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-infer.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-namedgraph.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-simpleaqr.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-sparqlquery.md
 delete mode 100644 extras/rya.manual/src/site/markdown/sm-updatedata.md
 delete mode 100644 extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js
 delete mode 100644 extras/rya.manual/src/site/site.xml
 delete mode 100644 extras/rya.prospector/pom.xml
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy
 delete mode 100644 extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy
 delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java
 delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java
 delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java
 delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java
 delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java
extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java delete mode 100644 extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java delete mode 100644 extras/rya.prospector/src/main/resources/META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan delete mode 100644 extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy delete mode 100644 extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java delete mode 100644 extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java delete mode 100644 extras/rya.prospector/src/test/resources/stats_cluster_config.xml delete mode 100644 extras/tinkerpop.rya/pom.xml delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy delete mode 100644 extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy delete mode 100644 extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java delete mode 100644 extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java delete mode 100644 extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy delete mode 100644 extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy delete mode 100644 
extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy delete mode 100644 extras/tinkerpop.rya/src/test/resources/log4j.properties delete mode 100644 osgi/alx.rya.console/pom.xml delete mode 100644 osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java delete mode 100644 osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java delete mode 100644 osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java delete mode 100644 osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml delete mode 100644 osgi/alx.rya/pom.xml delete mode 100644 osgi/alx.rya/src/main/features/alx.rya-features.xml delete mode 100644 osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java delete mode 100644 osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml delete mode 100644 osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml delete mode 100644 osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml delete mode 100644 osgi/camel.rya/pom.xml delete mode 100644 osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java delete mode 100644 osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java delete mode 100644 osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java delete mode 100644 osgi/camel.rya/src/main/resources/META-INF/services/org/apache/camel/component/cbsail delete mode 100644 osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java delete mode 100644 osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java delete mode 100644 osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java delete mode 100644 osgi/pom.xml delete mode 100644 osgi/sesame-runtime-osgi/openrdf-sesame-osgi.bnd delete mode 100644 osgi/sesame-runtime-osgi/pom.xml delete mode 100644 pig/accumulo.pig/pom.xml delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java delete mode 100644 pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java delete mode 100644 pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java delete mode 100644 pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java delete mode 100644 pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java delete mode 100644 pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java delete mode 100644 pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java delete mode 100644 pig/accumulo.pig/src/test/resources/ResultsFile1.txt delete mode 100644 pig/accumulo.pig/src/test/resources/testQuery.txt delete mode 100644 pig/accumulo.pig/src/test/resources/testQuery2.txt delete mode 100644 pig/pom.xml delete mode 100644 sail/pom.xml delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java delete mode 100644 
sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java delete mode 100644 sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java delete mode 100644 sail/src/main/resources/META-INF/org.openrdf.store.schemas delete mode 100644 
sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl delete mode 100644 sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory delete mode 100644 sail/src/main/resources/ehcache.xml delete mode 100644 sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java delete mode 100644 sail/src/test/java/mvm/rya/HashJoinTest.java delete mode 100644 sail/src/test/java/mvm/rya/IterativeJoinTest.java delete mode 100644 sail/src/test/java/mvm/rya/MergeJoinTest.java delete mode 100644 sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java delete mode 100644 sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java delete mode 100644 sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java delete mode 100644 sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java delete mode 100644 sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java delete mode 100644 sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java delete mode 100644 sail/src/test/resources/cdrdf.xml delete mode 100644 sail/src/test/resources/namedgraphs.trig delete mode 100644 sail/src/test/resources/ntriples.nt delete mode 100644 sail/src/test/resources/reification.xml delete mode 100644 sail/src/test/resources/univ-bench.owl delete mode 100644 src/license/apacheV2Header.ftl delete mode 100644 temp.txt delete mode 100644 web/pom.xml delete mode 100644 web/web.rya/pom.xml delete mode 100644 web/web.rya/resources/environment.properties delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java delete mode 100644 web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml delete mode 100644 web/web.rya/src/main/webapp/WEB-INF/web.xml delete mode 100644 web/web.rya/src/main/webapp/crossdomain.xml delete mode 100644 web/web.rya/src/main/webapp/sparqlQuery.jsp delete mode 100644 web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java delete mode 100644 web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java delete mode 100644 web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java delete mode 100644 web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java delete mode 100644 
web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java delete mode 100644 web/web.rya/src/test/resources/cdrdf.xml delete mode 100644 web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml delete mode 100644 web/web.rya/src/test/resources/controllerIntegrationTest-root.xml delete mode 100644 web/web.rya/src/test/resources/controllerTest-context.xml delete mode 100644 web/web.rya/src/test/resources/dummyData/memorystore.data delete mode 100644 web/web.rya/src/test/resources/namedgraphs.trig delete mode 100644 web/web.rya/src/test/resources/test.nt diff --git a/LICENSE b/LICENSE deleted file mode 100644 index d64569567..000000000 --- a/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 71483079d..000000000 --- a/NOTICE +++ /dev/null @@ -1,6 +0,0 @@ -Apache Rya -Copyright 2015 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - diff --git a/README.md b/README.md index 7869bef31..0376054c6 100644 --- a/README.md +++ b/README.md @@ -15,303 +15,9 @@ KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> -# RYA +# NOTICE -## Overview +This branch does not contain any source code. It exists for two reasons: -RYA is a scalable RDF store built on top of a columnar index store (such as Accumulo). It is implemented as an extension to OpenRdf to provide easy query mechanisms (SPARQL, SERQL, etc.) and RDF data storage (RDF/XML, N-Triples, etc.). - -RYA stands for RDF y(and) Accumulo. - -## Rya Manual - -A copy of the Rya Manual is located [here](extras/rya.manual/src/site/markdown/index.md). The material in the manual and below may be out of sync. - -## Upgrade Path - -Since the data encodings changed in the 3.2.2 release, you will need to run the Upgrade322Tool MapReduce job to perform the upgrade. - -1. Build the project with -Pmr to build the MapReduce artifacts. -2. Make sure to clone the Rya tables before doing the upgrade. -3. Run: - -``` -hadoop jar accumulo.rya-mr.jar mvm.rya.accumulo.mr.upgrade.Upgrade322Tool -Dac.instance={} -Dac.username={} -Dac.pwd={} -``` - -## Quick Start - -This tutorial outlines the steps needed to get started quickly with the Rya store using the web-based endpoint. - -### Prerequisites - -* Columnar store (e.g., Accumulo); this tutorial will proceed using Accumulo -* Rya code (Git: git://git.apache.org/incubator-rya.git) -* Maven 3.0+ - -### Building from Source - -Using Git, pull down the latest code from the URL above. - -Build the code with `mvn clean install` - -If all goes well, the build should succeed and a war should be produced in `web/web.rya/target/web.rya.war` - -Note: To build the geomesa/lucene indexing support, run the build with the 'indexing' profile: `mvn clean install -P indexing` - -Note: If you are building on Windows, you will need hadoop-common 2.6.0's `winutils.exe` and `hadoop.dll`.
You can download it from [here](https://github.com/amihalik/hadoop-common-2.6.0-bin/archive/master.zip). This build requires the [Visual C++ Redistributable for Visual Studio 2015 (x64)](https://www.microsoft.com/en-us/download/details.aspx?id=48145). You will also need to set your path and Hadoop home using the commands below: - -``` -set HADOOP_HOME=c:\hadoop-common-2.6.0-bin -set PATH=%PATH%;c:\hadoop-common-2.6.0-bin\bin -``` - -### Deployment Using Tomcat - -Unwar the above war into the webapps directory. - -To point the web.rya war to the appropriate Accumulo instance, make a properties file `environment.properties` and put it on the classpath. Here is an example: - -``` -instance.name=accumulo #Accumulo instance name -instance.zk=localhost:2181 #Accumulo Zookeepers -instance.username=root #Accumulo username -instance.password=secret #Accumulo pwd -rya.tableprefix=triplestore_ #Rya Table Prefix -rya.displayqueryplan=true #To display the query plan -``` - -Start the Tomcat server: `./bin/startup.sh` - -## Usage - -### Load Data - -#### Web REST endpoint - -The war sets up a web REST endpoint at `http://server/web.rya/loadrdf` that allows POSTed data to be loaded into the RDF store. This short tutorial will use Java code to post data. - -First, you will need data to load, and you will need to figure out what format that data is in. - -For this sample, we will use the following N-Triples (the URIs here are example values): - -``` -<http://mynamespace/ProductType1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://mynamespace/ProductType> . -<http://mynamespace/ProductType1> <http://www.w3.org/2000/01/rdf-schema#label> "Thing" . -<http://mynamespace/ProductType1> <http://purl.org/dc/elements/1.1/publisher> <http://mynamespace/Publisher1> . -``` - -Save this file somewhere; we will refer to its location as $RDF_DATA. - -Second, use the following Java code to load data to the REST endpoint: - -``` JAVA -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.net.URL; -import java.net.URLConnection; - -public class LoadDataServletRun { - - public static void main(String[] args) { - try { - final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader() - .getResourceAsStream("$RDF_DATA"); - URL url = new URL("http://server/web.rya/loadrdf" + - "?format=N-Triples" + - ""); - URLConnection urlConnection = url.openConnection(); - urlConnection.setRequestProperty("Content-Type", "text/plain"); - urlConnection.setDoOutput(true); - - final OutputStream os = urlConnection.getOutputStream(); - - int read; - while((read = resourceAsStream.read()) >= 0) { - os.write(read); - } - resourceAsStream.close(); - os.flush(); - - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - os.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } -} -``` - -Compile and run the code above, changing the references for $RDF_DATA and the URL where your RDF war is running. - -The default "format" is RDF/XML, but these formats are supported: RDFXML, NTRIPLES, TURTLE, N3, TRIX, TRIG.
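The same load endpoint can also be exercised with any plain HTTP client instead of Java. A minimal sketch using curl (the `server` host and the local data file path are placeholders, not values taken from this repository):

```
curl -X POST -H "Content-Type: text/plain" --data-binary @/tmp/data.ntrips "http://server/web.rya/loadrdf?format=N-Triples"
```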
- -#### Bulk Loading data - -Bulk loading data is done through MapReduce jobs. - -##### Bulk Load RDF data - -This MapReduce job will read a full file into memory and parse it into statements, which are then saved into the store. Here is an example for storing in Accumulo: - -``` -hadoop jar target/accumulo.rya-3.0.4-SNAPSHOT-shaded.jar mvm.rya.accumulo.mr.fileinput.BulkNtripsInputTool -Dac.zk=localhost:2181 -Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret -Drdf.tablePrefix=triplestore_ -Dio.sort.mb=64 /tmp/temp.ntrips -``` - -Options: - -* rdf.tablePrefix : The tables (spo, po, osp) are prefixed with this qualifier. The tables become: (rdf.tablePrefix)spo, (rdf.tablePrefix)po, (rdf.tablePrefix)osp -* ac.* : Accumulo connection parameters -* rdf.format : See RDFFormat from OpenRDF; samples include Trig, N-Triples, RDF/XML -* io.sort.mb : The higher the value, the faster the job goes; just remember that you will need at least this much RAM per mapper - -The argument is the directory/file to load. This file needs to be loaded into HDFS before running.
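For example, assuming the triples file sits at `/tmp/temp.ntrips` on the local filesystem (both paths below are illustrative), it can be staged into HDFS like this:

```
hadoop fs -mkdir -p /tmp
hadoop fs -put /tmp/temp.ntrips /tmp/temp.ntrips
```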
- -#### Direct OpenRDF API - -Here is some sample code to load data directly through the OpenRDF API (loading N-Triples data). -You will need at least accumulo.rya-<version>, rya.api, and rya.sail.impl on the classpath, plus their transitive dependencies. I find that Maven is the easiest way to get a project dependency tree set up. - -``` JAVA -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -AccumuloRyaDAO dao = new AccumuloRyaDAO(); -Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password"); -dao.setConnector(connector); -conf.setTablePrefix("rya_"); -dao.setConf(conf); -store.setRdfDao(dao); - -Repository myRepository = new RyaSailRepository(store); -myRepository.initialize(); -RepositoryConnection conn = myRepository.getConnection(); - -//load data from file -final File file = new File("ntriples.ntrips"); -conn.add(new FileInputStream(file), file.getName(), - RDFFormat.NTRIPLES, new Resource[]{}); - -conn.commit(); - -conn.close(); -myRepository.shutDown(); -``` - - -### Query Data - -#### Web JSP endpoint - -Open a URL to `http://server/web.rya/sparqlQuery.jsp`. This simple form can run SPARQL. - -#### Web REST endpoint - -The war sets up a web REST endpoint at `http://server/web.rya/queryrdf` that allows GET requests with queries. - -For this sample, we will assume you already loaded data from the [loaddata.html] tutorial. - -Use the following Java code to query the REST endpoint: - -``` JAVA -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.net.URLEncoder; - -public class QueryDataServletRun { - - public static void main(String[] args) { - try { - String query = "select * where {\n" + - " ?s ?p ?o.\n" + - "}"; - - String queryenc = URLEncoder.encode(query, "UTF-8"); - - URL url = new URL("http://server/web.rya/queryrdf?query=" + queryenc); - URLConnection urlConnection = url.openConnection(); - urlConnection.setDoOutput(true); - - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } -} -``` - -Compile and run the code above, changing the URL to where your RDF war is running. - -#### Direct Code - -Here is a code snippet for running queries directly against Accumulo. You will need at least accumulo.rya.jar, rya.api, and rya.sail.impl on the classpath, plus their transitive dependencies. I find that Maven is the easiest way to get a project dependency tree set up. - -``` JAVA -Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password"); - -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); -crdfdao.setConnector(connector); - -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -conf.setTablePrefix("rts_"); -conf.setDisplayQueryPlan(true); -crdfdao.setConf(conf); -store.setRdfDao(crdfdao); - -InferenceEngine inferenceEngine = new InferenceEngine(); -inferenceEngine.setRdfDao(crdfdao); -inferenceEngine.setConf(conf); -store.setInferenceEngine(inferenceEngine); - -Repository myRepository = new RyaSailRepository(store); -myRepository.initialize(); - -String query = "select * where {\n" + - " ?s ?p ?o.\n" + - "}"; -RepositoryConnection conn = myRepository.getConnection(); -System.out.println(query); -TupleQuery tupleQuery = conn.prepareTupleQuery( - QueryLanguage.SPARQL, query); -ValueFactory vf = ValueFactoryImpl.getInstance(); - -TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out); -tupleQuery.evaluate(new TupleQueryResultHandler() { - - int count = 0; - - @Override - public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - System.out.println(bindingSet); - } -}); - -conn.close(); -myRepository.shutDown(); -``` +1. We pushed a large change and this broke the process that pushes changes to GitHub. We needed a small commit to wake up the process, so we created this branch and pushed a small change. +2. Apache currently prevents branch deletes. \ No newline at end of file diff --git a/common/pom.xml b/common/pom.xml deleted file mode 100644 index c14d712f6..000000000 --- a/common/pom.xml +++ /dev/null @@ -1,39 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0"> - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.rya</groupId> - <artifactId>rya-project</artifactId> - <version>3.2.10-SNAPSHOT</version> - </parent> - <artifactId>rya.common</artifactId> - <name>Apache Rya Common Projects</name> - <packaging>pom</packaging> - <modules> - <module>rya.api</module> - <module>rya.provenance</module> - </modules> -</project> diff --git a/common/rya.api/.gitignore b/common/rya.api/.gitignore deleted file mode 100644 index b83d22266..000000000 --- a/common/rya.api/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/common/rya.api/pom.xml b/common/rya.api/pom.xml deleted file mode 100644 index 7c90521dc..000000000 --- a/common/rya.api/pom.xml +++ /dev/null @@ -1,76 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0"> - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.rya</groupId> - <artifactId>rya.common</artifactId> - <version>3.2.10-SNAPSHOT</version> - </parent> - <artifactId>rya.api</artifactId> - <name>Apache Rya Common API</name> - <dependencies> - <dependency> - <groupId>org.calrissian.mango</groupId> - <artifactId>mango-core</artifactId> - </dependency> - <dependency> - <groupId>org.openrdf.sesame</groupId> - <artifactId>sesame-model</artifactId> - </dependency> - <dependency> - <groupId>org.openrdf.sesame</groupId> - <artifactId>sesame-query</artifactId> - </dependency> - <dependency> - <groupId>org.openrdf.sesame</groupId> - <artifactId>sesame-queryalgebra-model</artifactId> - </dependency> - <dependency> - <groupId>org.openrdf.sesame</groupId> - <artifactId>sesame-queryalgebra-evaluation</artifactId> - </dependency> - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - </dependency> - <dependency> - <groupId>joda-time</groupId> - <artifactId>joda-time</artifactId> - </dependency> - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - </dependencies> -</project> diff --git a/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java b/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java deleted file mode 100644 index eea50f497..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java +++ /dev/null @@ -1,55 +0,0 @@ -package mvm.rya.api; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Class InvalidValueTypeMarkerRuntimeException - * Date: Jan 7, 2011 - * Time: 12:58:27 PM - */ -public class InvalidValueTypeMarkerRuntimeException extends RuntimeException { - private int valueTypeMarker = -1; - - public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker) { - super(); - this.valueTypeMarker = valueTypeMarker; - } - - public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, String s) { - super(s); - this.valueTypeMarker = valueTypeMarker; - } - - public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, String s, Throwable throwable) { - super(s, throwable); - this.valueTypeMarker = valueTypeMarker; - } - - public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, Throwable throwable) { - super(throwable); - this.valueTypeMarker = valueTypeMarker; - } - - public int getValueTypeMarker() { - return valueTypeMarker; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java deleted file mode 100644 index 1d0e16543..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java +++ /dev/null @@ -1,507 +0,0 @@ -package mvm.rya.api; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.List; - -import mvm.rya.api.layout.TableLayoutStrategy; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.persist.RdfEvalStatsDAO; - -import org.apache.hadoop.conf.Configuration; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; - -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; - -/** - * Rdf triple store specific configuration - */ -public abstract class RdfCloudTripleStoreConfiguration extends Configuration { - - // public static final String CONF_ISQUERYTIMEBASED = "query.timebased"; - public static final String CONF_TTL = "query.ttl"; - public static final String CONF_STARTTIME = "query.startTime"; - // public static final String CONF_TIMEINDEXURIS = "query.timeindexuris"; - public static final String CONF_NUM_THREADS = "query.numthreads"; - public static final String CONF_PERFORMANT = "query.performant"; - public static final String CONF_INFER = "query.infer"; - public static final String CONF_USE_STATS = "query.usestats"; - public static final String CONF_USE_COMPOSITE = "query.usecompositecard"; - public static final String CONF_USE_SELECTIVITY = "query.useselectivity"; - public static final String CONF_TBL_PREFIX = "query.tblprefix"; - public static final String CONF_BATCH_SIZE = "query.batchsize"; - public static final String CONF_OFFSET = "query.offset"; - public static final String CONF_LIMIT = "query.limit"; - public static final String CONF_QUERYPLAN_FLAG = "query.printqueryplan"; - public static final String CONF_QUERY_AUTH = "query.auth"; - public static final String CONF_RESULT_FORMAT = "query.resultformat"; - public static final String CONF_CV = "conf.cv"; - public static final String CONF_TBL_SPO = "tbl.spo"; - public static final String CONF_TBL_PO = "tbl.po"; - public static final String CONF_TBL_OSP = "tbl.osp"; - public static final String CONF_TBL_NS = "tbl.ns"; - public static final String CONF_TBL_EVAL = "tbl.eval"; - public static final String CONF_PREFIX_ROW_WITH_HASH = "tbl.hashprefix"; - public static final String CONF_OPTIMIZERS = "query.optimizers"; - public static final String CONF_PCJ_OPTIMIZER = "pcj.query.optimizer"; - public static final String CONF_PCJ_TABLES = "pcj.index.tables"; - - - /** - * @deprecated use CONF_* - */ - public static final String BINDING_DISP_QUERYPLAN = CONF_QUERYPLAN_FLAG; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_AUTH = CONF_QUERY_AUTH; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_CV = CONF_CV; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_TTL = CONF_TTL; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_STARTTIME = CONF_STARTTIME; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_PERFORMANT = CONF_PERFORMANT; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_INFER = CONF_INFER; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_USESTATS = CONF_USE_STATS; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_OFFSET = CONF_OFFSET; - /** - * @deprecated use CONF_* - */ - public static final String BINDING_LIMIT = CONF_LIMIT; - - public static final String STATS_PUSH_EMPTY_RDFTYPE_DOWN = "conf.stats.rdftype.down"; - public static final String INFER_INCLUDE_INVERSEOF = "infer.include.inverseof"; - public static final String INFER_INCLUDE_SUBCLASSOF = "infer.include.subclassof"; - public 
static final String INFER_INCLUDE_SUBPROPOF = "infer.include.subpropof"; - public static final String INFER_INCLUDE_SYMMPROP = "infer.include.symmprop"; - public static final String INFER_INCLUDE_TRANSITIVEPROP = "infer.include.transprop"; - - public static final String RDF_DAO_CLASS = "class.rdf.dao"; - public static final String RDF_EVAL_STATS_DAO_CLASS = "class.rdf.evalstats"; - - public static final String REGEX_SUBJECT = "query.regex.subject"; - public static final String REGEX_PREDICATE = "query.regex.predicate"; - public static final String REGEX_OBJECT = "query.regex.object"; - private static final String[] EMPTY_STR_ARR = new String[0]; - - private TableLayoutStrategy tableLayoutStrategy = new TablePrefixLayoutStrategy(); - - public RdfCloudTripleStoreConfiguration() { - } - - public RdfCloudTripleStoreConfiguration(Configuration other) { - super(other); - if (other instanceof RdfCloudTripleStoreConfiguration) { - setTableLayoutStrategy(((RdfCloudTripleStoreConfiguration) other).getTableLayoutStrategy()); - } - } - - public abstract RdfCloudTripleStoreConfiguration clone(); - - public TableLayoutStrategy getTableLayoutStrategy() { - return tableLayoutStrategy; - } - - public void setTableLayoutStrategy(TableLayoutStrategy tableLayoutStrategy) { - if (tableLayoutStrategy != null) { - this.tableLayoutStrategy = tableLayoutStrategy; - } else { - this.tableLayoutStrategy = new TablePrefixLayoutStrategy(); //default - } - set(CONF_TBL_SPO, this.tableLayoutStrategy.getSpo()); - set(CONF_TBL_PO, this.tableLayoutStrategy.getPo()); - set(CONF_TBL_OSP, this.tableLayoutStrategy.getOsp()); - set(CONF_TBL_NS, this.tableLayoutStrategy.getNs()); - set(CONF_TBL_EVAL, this.tableLayoutStrategy.getEval()); - } - - public Long getTtl() { - String val = get(CONF_TTL); - if (val != null) { - return Long.valueOf(val); - } - return null; - } - - public void setTtl(Long ttl) { - Preconditions.checkNotNull(ttl); - Preconditions.checkArgument(ttl >= 0, "ttl must be non negative"); - set(CONF_TTL, ttl.toString()); - } - - public Long getStartTime() { - String val = get(CONF_STARTTIME); - if (val != null) { - return Long.valueOf(val); - } - return null; - } - - public void setStartTime(Long startTime) { - Preconditions.checkNotNull(startTime); - Preconditions.checkArgument(startTime >= 0, "startTime must be non negative"); - set(CONF_STARTTIME, startTime.toString()); - } - - public Integer getNumThreads() { - return getInt(CONF_NUM_THREADS, 2); - } - - public void setNumThreads(Integer numThreads) { - Preconditions.checkNotNull(numThreads); - Preconditions.checkArgument(numThreads > 0, "numThreads must be greater than 0"); - setInt(CONF_NUM_THREADS, numThreads); - } - - public Boolean isPerformant() { - return getBoolean(CONF_PERFORMANT, true); - } - - public void setPerformant(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_PERFORMANT, val); - } - - public Boolean isInfer() { - return getBoolean(CONF_INFER, true); - } - - public void setInfer(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_INFER, val); - } - - public Boolean isUseStats() { - return getBoolean(CONF_USE_STATS, false); - } - - public void setUseStats(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_USE_STATS, val); - } - - public Boolean isUseSelectivity() { - return getBoolean(CONF_USE_SELECTIVITY, false); - } - - public void setUseSelectivity(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_USE_SELECTIVITY, val); - } - - public Boolean isPrefixRowsWithHash() { - 
return getBoolean(CONF_PREFIX_ROW_WITH_HASH, false); - } - - public void setPrefixRowsWithHash(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_PREFIX_ROW_WITH_HASH, val); - } - - public String getTablePrefix() { - return get(CONF_TBL_PREFIX, RdfCloudTripleStoreConstants.TBL_PRFX_DEF); - } - - public void setTablePrefix(String tablePrefix) { - Preconditions.checkNotNull(tablePrefix); - set(CONF_TBL_PREFIX, tablePrefix); - setTableLayoutStrategy(new TablePrefixLayoutStrategy(tablePrefix)); //TODO: Should we change the layout strategy - } - - public Integer getBatchSize() { - String val = get(CONF_BATCH_SIZE); - if (val != null) { - return Integer.valueOf(val); - } - return null; - } - - public void setBatchSize(Long batchSize) { - Preconditions.checkNotNull(batchSize); - Preconditions.checkArgument(batchSize > 0, "Batch Size must be greater than 0"); - setLong(CONF_BATCH_SIZE, batchSize); - } - - public Long getOffset() { - String val = get(CONF_OFFSET); - if (val != null) { - return Long.valueOf(val); - } - return null; - } - - public void setOffset(Long offset) { - Preconditions.checkNotNull(offset); - Preconditions.checkArgument(offset >= 0, "offset must be non negative"); - setLong(CONF_OFFSET, offset); - } - - public Long getLimit() { - String val = get(CONF_LIMIT); - if (val != null) { - return Long.valueOf(val); - } - return null; - } - - public void setLimit(Long limit) { - Preconditions.checkNotNull(limit); - Preconditions.checkArgument(limit >= 0, "limit must be non negative"); - setLong(CONF_LIMIT, limit); - } - - - public Boolean isDisplayQueryPlan() { - return getBoolean(CONF_QUERYPLAN_FLAG, false); - } - - public void setDisplayQueryPlan(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_QUERYPLAN_FLAG, val); - } - - /** - * @return - * @deprecated - */ - public String getAuth() { - return Joiner.on(",").join(getAuths()); - } - - /** - * @param auth - * @deprecated - */ - public void setAuth(String auth) { - Preconditions.checkNotNull(auth); - setStrings(CONF_QUERY_AUTH, auth); - } - - public String[] getAuths() { - return getStrings(CONF_QUERY_AUTH, EMPTY_STR_ARR); - } - - public void setAuths(String...
auths) { - setStrings(CONF_QUERY_AUTH, auths); - } - - public String getEmit() { - return get(CONF_RESULT_FORMAT); - } - - public void setEmit(String emit) { - Preconditions.checkNotNull(emit); - set(CONF_RESULT_FORMAT, emit); - } - - public String getCv() { - return get(CONF_CV); - } - - public void setCv(String cv) { - Preconditions.checkNotNull(cv); - set(CONF_CV, cv); - } - - - public Boolean isUseCompositeCardinality() { - return getBoolean(CONF_USE_COMPOSITE, true); - } - - public void setCompositeCardinality(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(CONF_USE_COMPOSITE, val); - } - - - public Boolean isStatsPushEmptyRdftypeDown() { - return getBoolean(STATS_PUSH_EMPTY_RDFTYPE_DOWN, true); - } - - public void setStatsPushEmptyRdftypeDown(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(STATS_PUSH_EMPTY_RDFTYPE_DOWN, val); - } - - public Boolean isInferInverseOf() { - return getBoolean(INFER_INCLUDE_INVERSEOF, true); - } - - public void setInferInverseOf(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(INFER_INCLUDE_INVERSEOF, val); - } - - public Boolean isInferSubClassOf() { - return getBoolean(INFER_INCLUDE_SUBCLASSOF, true); - } - - public void setInferSubClassOf(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(INFER_INCLUDE_SUBCLASSOF, val); - } - - public Boolean isInferSubPropertyOf() { - return getBoolean(INFER_INCLUDE_SUBPROPOF, true); - } - - public void setInferSubPropertyOf(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(INFER_INCLUDE_SUBPROPOF, val); - } - - public Boolean isInferSymmetricProperty() { - return getBoolean(INFER_INCLUDE_SYMMPROP, true); - } - - public void setInferSymmetricProperty(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(INFER_INCLUDE_SYMMPROP, val); - } - - public Boolean isInferTransitiveProperty() { - return getBoolean(INFER_INCLUDE_TRANSITIVEPROP, true); - } - - public void setInferTransitiveProperty(Boolean val) { - Preconditions.checkNotNull(val); - setBoolean(INFER_INCLUDE_TRANSITIVEPROP, val); - } - - public void setRdfEvalStatsDaoClass(Class<? extends RdfEvalStatsDAO> rdfEvalStatsDaoClass) { - Preconditions.checkNotNull(rdfEvalStatsDaoClass); - setClass(RDF_EVAL_STATS_DAO_CLASS, rdfEvalStatsDaoClass, RdfEvalStatsDAO.class); - } - - public Class<? extends RdfEvalStatsDAO> getRdfEvalStatsDaoClass() { - return getClass(RDF_EVAL_STATS_DAO_CLASS, null, RdfEvalStatsDAO.class); - } - - - public void setPcjTables(List<String> indexTables) { - Preconditions.checkNotNull(indexTables); - setStrings(CONF_PCJ_TABLES, indexTables.toArray(new String[]{})); - } - - - public List<String> getPcjTables() { - List<String> pcjTables = Lists.newArrayList(); - String[] tables = getStrings(CONF_PCJ_TABLES); - if(tables == null) { - return pcjTables; - } - for(String table: tables) { - Preconditions.checkNotNull(table); - pcjTables.add(table); - } - return pcjTables; - } - - - public void setPcjOptimizer(Class<? extends QueryOptimizer> optimizer) { - Preconditions.checkNotNull(optimizer); - setClass(CONF_PCJ_OPTIMIZER, optimizer, QueryOptimizer.class); - } - - public Class<QueryOptimizer> getPcjOptimizer() { - Class<?> opt = getClass(CONF_PCJ_OPTIMIZER, null, QueryOptimizer.class); - if (opt != null) { - Preconditions.checkArgument(QueryOptimizer.class.isAssignableFrom(opt)); - return (Class<QueryOptimizer>) opt; - } else { - return null; - } - - } - - - public void setOptimizers(List<Class<? extends QueryOptimizer>> optimizers) { - Preconditions.checkNotNull(optimizers); - List<String> strs = Lists.newArrayList(); - for (Class<?> ai : optimizers){ - Preconditions.checkNotNull(ai); - strs.add(ai.getName()); - } - - setStrings(CONF_OPTIMIZERS,
strs.toArray(new String[]{})); - } - - public List<Class<QueryOptimizer>> getOptimizers() { - List<Class<QueryOptimizer>> opts = Lists.newArrayList(); - for (Class<?> clazz : getClasses(CONF_OPTIMIZERS)){ - Preconditions.checkArgument(QueryOptimizer.class.isAssignableFrom(clazz)); - opts.add((Class<QueryOptimizer>) clazz); - } - - return opts; - } - - - - public String getRegexSubject() { - return get(REGEX_SUBJECT); - } - - public void setRegexSubject(String regexSubject) { - Preconditions.checkNotNull(regexSubject); - set(REGEX_SUBJECT, regexSubject); - } - - public String getRegexPredicate() { - return get(REGEX_PREDICATE); - } - - public void setRegexPredicate(String regex) { - Preconditions.checkNotNull(regex); - set(REGEX_PREDICATE, regex); - } - - public String getRegexObject() { - return get(REGEX_OBJECT); - } - - public void setRegexObject(String regex) { - Preconditions.checkNotNull(regex); - set(REGEX_OBJECT, regex); - } -}
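As a rough illustration of how the configuration class above is typically consumed: it is abstract, so callers go through a concrete subclass such as AccumuloRdfConfiguration (defined in the dao/accumulo.rya module removed by this same patch). A minimal sketch, with placeholder values:

``` JAVA
import mvm.rya.accumulo.AccumuloRdfConfiguration;

public class ConfigurationExample {
    public static void main(String[] args) {
        // AccumuloRdfConfiguration extends RdfCloudTripleStoreConfiguration,
        // so the setters shown above are all available on it.
        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix("rya_");      // query.tblprefix; also resets the table layout strategy
        conf.setDisplayQueryPlan(true);   // query.printqueryplan
        conf.setNumThreads(4);            // query.numthreads; must be > 0
        conf.setInfer(false);             // query.infer
        conf.setAuths("U", "FOUO");       // query.auth scan authorizations (placeholder labels)
        System.out.println(conf.getTablePrefix()); // prints rya_
    }
}
```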
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java deleted file mode 100644 index 5311bd909..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java +++ /dev/null @@ -1,151 +0,0 @@ -package mvm.rya.api; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import mvm.rya.api.domain.RyaSchema; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -public class RdfCloudTripleStoreConstants { - - public static final String NAMESPACE = RyaSchema.NAMESPACE; - public static final String AUTH_NAMESPACE = RyaSchema.AUTH_NAMESPACE; - public static ValueFactory VALUE_FACTORY = ValueFactoryImpl.getInstance(); - public static URI RANGE = VALUE_FACTORY.createURI(NAMESPACE, "range"); - public static URI PARTITION_TIMERANGE = VALUE_FACTORY.createURI("urn:mvm.mmrts.partition.rdf/08/2011#", "timeRange"); - public static Literal EMPTY_LITERAL = VALUE_FACTORY.createLiteral(0); - public static final byte EMPTY_BYTES[] = new byte[0]; - public static final Text EMPTY_TEXT = new Text(); - - public static final Long MAX_MEMORY = 10000000L; - public static final Long MAX_TIME = 60000L; - public static final Integer NUM_THREADS = 4; - -// public static final String TS = "ts"; -// public static final Text TS_TXT = new Text(TS); - -// public static final String INFO = "info"; -// public static final Text INFO_TXT = new Text(INFO); - - public static final String SUBJECT_CF = "s"; - public static final Text SUBJECT_CF_TXT = new Text(SUBJECT_CF); - public static final String PRED_CF = "p"; - public static final Text PRED_CF_TXT = new Text(PRED_CF); - public static final String OBJ_CF = "o"; - public static final Text OBJ_CF_TXT = new Text(OBJ_CF); - public static final String SUBJECTOBJECT_CF = "so"; - public static final Text SUBJECTOBJECT_CF_TXT = new Text(SUBJECTOBJECT_CF); - public static final String SUBJECTPRED_CF = "sp"; - public static final Text SUBJECTPRED_CF_TXT = new Text(SUBJECTPRED_CF); - public static final String PREDOBJECT_CF = "po"; - public static final Text PREDOBJECT_CF_TXT = new Text(PREDOBJECT_CF); - - public static final String TBL_PRFX_DEF = "rya_"; - public static final String TBL_SPO_SUFFIX = "spo"; - public static final String TBL_PO_SUFFIX = "po"; - public static final String TBL_OSP_SUFFIX = "osp"; - public static final String TBL_EVAL_SUFFIX = "eval"; - public static final String TBL_STATS_SUFFIX = "prospects"; - public static final String TBL_SEL_SUFFIX = "selectivity"; - public static final String TBL_NS_SUFFIX = "ns"; - public static String TBL_SPO = TBL_PRFX_DEF + TBL_SPO_SUFFIX; - public static String TBL_PO = TBL_PRFX_DEF + TBL_PO_SUFFIX; - public static String TBL_OSP = TBL_PRFX_DEF + TBL_OSP_SUFFIX; - public static String TBL_EVAL = TBL_PRFX_DEF + TBL_EVAL_SUFFIX; - public static String TBL_STATS = TBL_PRFX_DEF + TBL_STATS_SUFFIX; - public static String TBL_SEL = TBL_PRFX_DEF + TBL_SEL_SUFFIX; - public static String TBL_NAMESPACE = TBL_PRFX_DEF + TBL_NS_SUFFIX; - - public static Text TBL_SPO_TXT = new Text(TBL_SPO); - public static Text TBL_PO_TXT = new
- Text(TBL_PO);
-        TBL_OSP_TXT = new Text(TBL_OSP);
-        TBL_EVAL_TXT = new Text(TBL_EVAL);
-        TBL_NAMESPACE_TXT = new Text(TBL_NAMESPACE);
-    }
-
-    public static final String INFO_NAMESPACE = "namespace";
-    public static final Text INFO_NAMESPACE_TXT = new Text(INFO_NAMESPACE);
-
-    public static final byte DELIM_BYTE = 0;
-    public static final byte TYPE_DELIM_BYTE = 1;
-    public static final byte LAST_BYTE = -1; //0xff
-    public static final byte[] LAST_BYTES = new byte[]{LAST_BYTE};
-    public static final byte[] TYPE_DELIM_BYTES = new byte[]{TYPE_DELIM_BYTE};
-    public static final String DELIM = "\u0000";
-    public static final String DELIM_STOP = "\u0001";
-    public static final String LAST = "\uFFDD";
-    public static final String TYPE_DELIM = new String(TYPE_DELIM_BYTES);
-    public static final byte[] DELIM_BYTES = DELIM.getBytes();
-    public static final byte[] DELIM_STOP_BYTES = DELIM_STOP.getBytes();
-
-
-    /* RECORD TYPES */
-    public static final int URI_MARKER = 7;
-
-    public static final int BNODE_MARKER = 8;
-
-    public static final int PLAIN_LITERAL_MARKER = 9;
-
-    public static final int LANG_LITERAL_MARKER = 10;
-
-    public static final int DATATYPE_LITERAL_MARKER = 11;
-
-    public static final int EOF_MARKER = 127;
-
-    //    public static final Authorizations ALL_AUTHORIZATIONS = new Authorizations(
-    //            "_");
-
-    public static enum TABLE_LAYOUT {
-        SPO, PO, OSP
-    }
-
-    //TODO: This should be in a version file somewhere
-    public static URI RTS_SUBJECT = VALUE_FACTORY.createURI(NAMESPACE, "rts");
-    public static RyaURI RTS_SUBJECT_RYA = new RyaURI(RTS_SUBJECT.stringValue());
-    public static URI RTS_VERSION_PREDICATE = VALUE_FACTORY.createURI(NAMESPACE, "version");
-    public static RyaURI RTS_VERSION_PREDICATE_RYA = new RyaURI(RTS_VERSION_PREDICATE.stringValue());
-    public static final Value VERSION = VALUE_FACTORY.createLiteral("3.0.0");
-    public static RyaType VERSION_RYA = new RyaType(VERSION.stringValue());
-
-    public static String RYA_CONFIG_AUTH = "RYACONFIG";
-}
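The mutable TBL_* fields above are the physical table names the rest of the codebase resolves against, so a usage sketch may help readers tracking this removal. This is illustrative only (the class is deleted in this patch); PrefixDemo is a hypothetical name, and the printed values follow directly from the suffix constants above:

    // Hypothetical sketch against the deleted RdfCloudTripleStoreConstants API.
    import static mvm.rya.api.RdfCloudTripleStoreConstants.*;

    public class PrefixDemo {
        public static void main(String[] args) {
            System.out.println(TBL_SPO);        // "rya_spo" under the default prefix
            prefixTables("myInstance_");        // re-points every table-name field at once
            System.out.println(TBL_SPO);        // "myInstance_spo"
            System.out.println(TBL_NAMESPACE);  // "myInstance_ns"
            prefixTables(null);                 // null falls back to TBL_PRFX_DEF ("rya_")
        }
    }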
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
deleted file mode 100644
index 4a13c01dc..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package mvm.rya.api;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.impl.ContextStatementImpl;
-import org.openrdf.model.impl.StatementImpl;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-public class RdfCloudTripleStoreStatement extends StatementImpl {
-
-    private Resource[] contexts; //TODO: no blank nodes
-
-    public RdfCloudTripleStoreStatement(Resource subject, URI predicate, Value object) {
-        super(subject, predicate, object);
-    }
-
-    public RdfCloudTripleStoreStatement(Resource subject, URI predicate, Value object,
-                                        Resource... contexts) {
-        super(subject, predicate, object);
-        this.contexts = contexts;
-    }
-
-    public Resource[] getContexts() {
-        return contexts;
-    }
-
-    public Collection<Statement> getStatements() {
-        Collection<Statement> statements = new ArrayList<Statement>();
-
-        if (getContexts() != null && getContexts().length > 1) {
-            for (Resource contxt : getContexts()) {
-                statements.add(new ContextStatementImpl(getSubject(),
-                        getPredicate(), getObject(), contxt));
-            }
-        } else
-            statements.add(this);
-
-        return statements;
-    }
-
-    @Override
-    public Resource getContext() {
-        if (contexts == null || contexts.length == 0)
-            return null;
-        else return contexts[0];
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
deleted file mode 100644
index eeadb9b30..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
+++ /dev/null
@@ -1,420 +0,0 @@
-package mvm.rya.api;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - - - -import mvm.rya.api.layout.TableLayoutStrategy; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.BNodeImpl; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; - -public class RdfCloudTripleStoreUtils { - - public static ValueFactory valueFactory = new ValueFactoryImpl(); - public static final Pattern literalPattern = Pattern.compile("^\"(.*?)\"((\\^\\^<(.+?)>)$|(@(.{2}))$)"); - -// public static byte[] writeValue(Value value) throws IOException { -// return RdfIO.writeValue(value); -//// if (value == null) -//// return new byte[]{}; -//// ByteArrayDataOutput dataOut = ByteStreams.newDataOutput(); -//// if (value instanceof URI) { -//// dataOut.writeByte(RdfCloudTripleStoreConstants.URI_MARKER); -//// writeString(((URI) value).toString(), dataOut); -//// } else if (value instanceof BNode) { -//// dataOut.writeByte(RdfCloudTripleStoreConstants.BNODE_MARKER); -//// writeString(((BNode) value).getID(), dataOut); -//// } else if (value instanceof Literal) { -//// Literal lit = (Literal) value; -//// -//// String label = lit.getLabel(); -//// String language = lit.getLanguage(); -//// URI datatype = lit.getDatatype(); -//// -//// if (datatype != null) { -//// dataOut.writeByte(RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER); -//// writeString(label, dataOut); -//// dataOut.write(writeValue(datatype)); -//// } else if (language != null) { -//// dataOut.writeByte(RdfCloudTripleStoreConstants.LANG_LITERAL_MARKER); -//// writeString(label, dataOut); -//// writeString(language, dataOut); -//// } else { -//// dataOut.writeByte(RdfCloudTripleStoreConstants.PLAIN_LITERAL_MARKER); -//// writeString(label, dataOut); -//// } -//// } else { -//// throw new IllegalArgumentException("unexpected value type: " -//// + value.getClass()); -//// } -//// return dataOut.toByteArray(); -// } - -// public static Value readValue(ByteArrayDataInput dataIn, ValueFactory vf) -// throws IOException, ClassCastException { -// return RdfIO.readValue(dataIn, vf, DELIM_BYTE); -//// int valueTypeMarker; -//// try { -//// valueTypeMarker = dataIn.readByte(); -//// } catch (Exception e) { -//// return null; -//// } -//// -//// Value ret = null; -//// if (valueTypeMarker == RdfCloudTripleStoreConstants.URI_MARKER) { -//// String uriString = readString(dataIn); -//// ret = vf.createURI(uriString); -//// } else if (valueTypeMarker == RdfCloudTripleStoreConstants.BNODE_MARKER) { -//// String bnodeID = readString(dataIn); -//// ret = vf.createBNode(bnodeID); -//// } else if (valueTypeMarker == RdfCloudTripleStoreConstants.PLAIN_LITERAL_MARKER) { -//// String label = readString(dataIn); -//// ret = vf.createLiteral(label); -//// } else if (valueTypeMarker == RdfCloudTripleStoreConstants.LANG_LITERAL_MARKER) { -//// String label = readString(dataIn); -//// String language = readString(dataIn); -//// ret = vf.createLiteral(label, language); -//// } else if (valueTypeMarker == RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER) { -//// String label = readString(dataIn); -//// URI datatype = (URI) readValue(dataIn, vf); -//// ret = vf.createLiteral(label, datatype); -//// } else { -//// throw new 
InvalidValueTypeMarkerRuntimeException(valueTypeMarker, "Invalid value type marker: " -//// + valueTypeMarker); -//// } -//// -//// return ret; -// } - -// public static void writeString(String s, ByteArrayDataOutput dataOut) -// throws IOException { -// dataOut.writeUTF(s); -// } -// -// public static String readString(ByteArrayDataInput dataIn) -// throws IOException { -// return dataIn.readUTF(); -// } -// -// public static byte[] writeContexts(Resource... contexts) throws IOException { -// if (contexts != null) { -// ByteArrayDataOutput cntxout = ByteStreams.newDataOutput(); -// for (Resource resource : contexts) { -// final byte[] context_bytes = RdfCloudTripleStoreUtils -// .writeValue(resource); -// cntxout.write(context_bytes); -// cntxout.write(RdfCloudTripleStoreConstants.DELIM_BYTES); -// } -// return cntxout.toByteArray(); -// } else -// return new byte[]{}; -// } -// -// public static List readContexts(byte[] cont_arr, ValueFactory vf) -// throws IOException { -// List contexts = new ArrayList(); -// String conts_str = new String(cont_arr); -// String[] split = conts_str.split(RdfCloudTripleStoreConstants.DELIM); -// for (String string : split) { -// contexts.add((Resource) RdfCloudTripleStoreUtils.readValue(ByteStreams -// .newDataInput(string.getBytes()), vf)); -// } -// return contexts; -// } - -// public static Statement translateStatementFromRow(ByteArrayDataInput input, Text context, TABLE_LAYOUT tble, ValueFactory vf) throws IOException { -// Resource subject; -// URI predicate; -// Value object; -// if (TABLE_LAYOUT.SPO.equals(tble)) { -// subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf); -// predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf); -// object = RdfCloudTripleStoreUtils.readValue(input, vf); -// } else if (TABLE_LAYOUT.OSP.equals(tble)) { -// object = RdfCloudTripleStoreUtils.readValue(input, vf); -// subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf); -// predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf); -// } else if (TABLE_LAYOUT.PO.equals(tble)) { -// predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf); -// object = RdfCloudTripleStoreUtils.readValue(input, vf); -// subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf); -// } else { -// throw new IllegalArgumentException("Table[" + tble + "] is not valid"); -// } -// if (context == null || INFO_TXT.equals(context)) -// return new StatementImpl(subject, predicate, object); //default graph -// else -// return new ContextStatementImpl(subject, predicate, object, (Resource) readValue(ByteStreams.newDataInput(context.getBytes()), vf)); //TODO: Seems like a perf hog -// } - -// public static byte[] buildRowWith(byte[] bytes_one, byte[] bytes_two, byte[] bytes_three) throws IOException { -// ByteArrayDataOutput rowidout = ByteStreams.newDataOutput(); -// rowidout.write(bytes_one); -// rowidout.writeByte(DELIM_BYTE); -//// rowidout.write(RdfCloudTripleStoreConstants.DELIM_BYTES); -// rowidout.write(bytes_two); -// rowidout.writeByte(DELIM_BYTE); -//// rowidout.write(RdfCloudTripleStoreConstants.DELIM_BYTES); -// rowidout.write(bytes_three); -// return truncateRowId(rowidout.toByteArray()); -// } - -// public static byte[] truncateRowId(byte[] byteArray) { -// if (byteArray.length > 32000) { -// ByteArrayDataOutput stream = ByteStreams.newDataOutput(); -// stream.write(byteArray, 0, 32000); -// return stream.toByteArray(); -// } -// return byteArray; -// } - - - public static class CustomEntry implements Map.Entry { - - private 
T key; - private U value; - - public CustomEntry(T key, U value) { - this.key = key; - this.value = value; - } - - @Override - public T getKey() { - return key; - } - - @Override - public U getValue() { - return value; - } - - public T setKey(T key) { - this.key = key; - return this.key; - } - - @Override - public U setValue(U value) { - this.value = value; - return this.value; - } - - @Override - public String toString() { - return "CustomEntry{" + - "key=" + key + - ", value=" + value + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - CustomEntry that = (CustomEntry) o; - - if (key != null ? !key.equals(that.key) : that.key != null) return false; - if (value != null ? !value.equals(that.value) : that.value != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = key != null ? key.hashCode() : 0; - result = 31 * result + (value != null ? value.hashCode() : 0); - return result; - } - } - - /** - * If value is a URI, then return as URI, otherwise return namespace/value as the URI - * - * @param namespace - * @param value - * @return - */ - public static URI convertToUri(String namespace, String value) { - if (value == null) - return null; - URI subjUri; - try { - subjUri = valueFactory.createURI(value); - } catch (Exception e) { - //not uri - if (namespace == null) - return null; - subjUri = valueFactory.createURI(namespace, value); - } - return subjUri; - } - - public static Literal convertToDataTypeLiteral(String s) { - int i = s.indexOf("^^"); - if (i != -1) { - String val = s.substring(1, i - 1); - int dt_i_start = i + 2; - int dt_i_end = s.length(); - if (s.charAt(dt_i_start) == '<') { - dt_i_start = dt_i_start + 1; - dt_i_end = dt_i_end - 1; - } - - String dataType = s.substring(dt_i_start, dt_i_end); - return valueFactory.createLiteral(val, valueFactory.createURI(dataType)); - } - return null; - } - - public static boolean isDataTypeLiteral(String lit) { - return lit != null && lit.indexOf("^^") != -1; - } - - public static boolean isUri(String uri) { - if (uri == null) return false; - try { - valueFactory.createURI(uri); - } catch (Exception e) { - return false; - } - return true; - } - - -// public static boolean isQueryTimeBased(Configuration conf) { -// return (conf != null && conf.getBoolean(RdfCloudTripleStoreConfiguration.CONF_ISQUERYTIMEBASED, false)); -// } -// -// public static void setQueryTimeBased(Configuration conf, boolean timeBased) { -// if (conf != null) -// conf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_ISQUERYTIMEBASED, isQueryTimeBased(conf) || timeBased); -// } - - -// public static void addTimeIndexUri(Configuration conf, URI timeUri, Class ttlValueConvClass) { -// String[] timeIndexUris = conf.getStrings(RdfCloudTripleStoreConfiguration.CONF_TIMEINDEXURIS); -// if (timeIndexUris == null) -// timeIndexUris = new String[0]; -// List stringList = new ArrayList(Arrays.asList(timeIndexUris)); -// String timeUri_s = timeUri.stringValue(); -// if (!stringList.contains(timeUri_s)) -// stringList.add(timeUri_s); -// conf.setStrings(RdfCloudTripleStoreConfiguration.CONF_TIMEINDEXURIS, stringList.toArray(new String[stringList.size()])); -// conf.set(timeUri_s, ttlValueConvClass.getName()); -// } - -// public static Class getTtlValueConverter(Configuration conf, URI predicate) throws ClassNotFoundException { -// if (predicate == null) -// return null; -// -// String[] s = 
- conf.getStrings(RdfCloudTripleStoreConfiguration.CONF_TIMEINDEXURIS);
-//        if (s == null)
-//            return null;
-//
-//        for (String uri : s) {
-//            if (predicate.stringValue().equals(uri)) {
-//                return (Class) RdfCloudTripleStoreUtils.class.getClassLoader().loadClass(conf.get(uri));
-//            }
-//        }
-//        return null;
-//    }
-
-    public static String layoutToTable(TABLE_LAYOUT layout, RdfCloudTripleStoreConfiguration conf) {
-        TableLayoutStrategy tableLayoutStrategy = conf.getTableLayoutStrategy();
-        return layoutToTable(layout, tableLayoutStrategy);
-    }
-
-    public static String layoutToTable(TABLE_LAYOUT layout, TableLayoutStrategy tableLayoutStrategy) {
-        if (tableLayoutStrategy == null) {
-            tableLayoutStrategy = new TablePrefixLayoutStrategy();
-        }
-        switch (layout) {
-            case SPO: {
-                return tableLayoutStrategy.getSpo();
-            }
-            case PO: {
-                return tableLayoutStrategy.getPo();
-            }
-            case OSP: {
-                return tableLayoutStrategy.getOsp();
-            }
-        }
-        return null;
-    }
-
-    public static String layoutPrefixToTable(TABLE_LAYOUT layout, String prefix) {
-        return layoutToTable(layout, new TablePrefixLayoutStrategy(prefix));
-    }
-
-    //helper methods to createValue
-    public static Value createValue(String resource) {
-        if (isBNode(resource))
-            return new BNodeImpl(resource.substring(2));
-        Literal literal;
-        if ((literal = makeLiteral(resource)) != null)
-            return literal;
-        if (resource.contains(":") || resource.contains("/") || resource.contains("#")) {
-            return new URIImpl(resource);
-        } else {
-            throw new RuntimeException((new StringBuilder()).append(resource).append(" is not a valid URI, blank node, or literal value").toString());
-        }
-    }
-
-    public static boolean isBNode(String resource) {
-        return resource.length() > 2 && resource.startsWith("_:");
-    }
-
-    public static boolean isLiteral(String resource) {
-        return literalPattern.matcher(resource).matches() || resource.startsWith("\"") && resource.endsWith("\"") && resource.length() > 1;
-    }
-
-    public static boolean isURI(String resource) {
-        return !isBNode(resource) && !isLiteral(resource) && (resource.contains(":") || resource.contains("/") || resource.contains("#"));
-    }
-
-    public static Literal makeLiteral(String resource) {
-        Matcher matcher = literalPattern.matcher(resource);
-        if (matcher.matches())
-            if (null != matcher.group(4))
-                return new LiteralImpl(matcher.group(1), new URIImpl(matcher.group(4)));
-            else
-                return new LiteralImpl(matcher.group(1), matcher.group(6));
-        if (resource.startsWith("\"") && resource.endsWith("\"") && resource.length() > 1)
-            return new LiteralImpl(resource.substring(1, resource.length() - 1));
-        else
-            return null;
-    }
-
-}
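Because the deleted utility class packs several independent helpers together, a brief hedged sketch of how they compose may help. UtilsDemo is a hypothetical name, and the "rya_spo" result assumes TablePrefixLayoutStrategy simply prepends its prefix to the layout suffix (that class is not shown in this hunk):

    import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
    import mvm.rya.api.RdfCloudTripleStoreUtils;
    import org.openrdf.model.Value;

    public class UtilsDemo {
        public static void main(String[] args) {
            // createValue dispatches on syntax: "_:" -> blank node,
            // quoted/typed -> literal, anything with :, / or # -> URI.
            Value bnode = RdfCloudTripleStoreUtils.createValue("_:node1");
            Value typed = RdfCloudTripleStoreUtils.createValue(
                    "\"42\"^^<http://www.w3.org/2001/XMLSchema#int>");
            Value uri = RdfCloudTripleStoreUtils.createValue("urn:test#foo");

            // layoutPrefixToTable maps a layout to its physical table name.
            String spo = RdfCloudTripleStoreUtils.layoutPrefixToTable(TABLE_LAYOUT.SPO, "rya_");
            System.out.println(spo); // presumably "rya_spo"
        }
    }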
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
deleted file mode 100644
index 199b63d45..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package mvm.rya.api.date;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.Value;
-
-import javax.xml.datatype.DatatypeConfigurationException;
-import javax.xml.datatype.DatatypeFactory;
-import java.util.GregorianCalendar;
-import java.util.TimeZone;
-
-/**
- * Class DateTimeTtlValueConverter
- * @deprecated
- */
-public class DateTimeTtlValueConverter implements TtlValueConverter {
-
-    private Value start, stop;
-    private TimeZone timeZone = TimeZone.getTimeZone("Zulu");
-
-    @Override
-    public void convert(String ttl, String startTime) {
-        try {
-            long start_l, stop_l;
-            long ttl_l = Long.parseLong(ttl);
-            stop_l = System.currentTimeMillis();
-            if (startTime != null)
-                stop_l = Long.parseLong(startTime);
-            start_l = stop_l - ttl_l;
-
-            GregorianCalendar cal = (GregorianCalendar) GregorianCalendar.getInstance();
-            cal.setTimeZone(getTimeZone());
-            cal.setTimeInMillis(start_l);
-            DatatypeFactory factory = DatatypeFactory.newInstance();
-            start = vf.createLiteral(factory.newXMLGregorianCalendar(cal));
-
-            cal.setTimeInMillis(stop_l);
-            stop = vf.createLiteral(factory.newXMLGregorianCalendar(cal));
-        } catch (DatatypeConfigurationException e) {
-            throw new RuntimeException("Exception occurred creating DataTypeFactory", e);
-        }
-    }
-
-    @Override
-    public Value getStart() {
-        return start;
-    }
-
-    @Override
-    public Value getStop() {
-        return stop;
-    }
-
-    public TimeZone getTimeZone() {
-        return timeZone;
-    }
-
-    public void setTimeZone(TimeZone timeZone) {
-        this.timeZone = timeZone;
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
deleted file mode 100644
index de4ff8bb3..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package mvm.rya.api.date;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - - - -import org.openrdf.model.Value; - -/** - * Class TimestampTtlValueConverter - * @deprecated - */ -public class TimestampTtlStrValueConverter implements TtlValueConverter { - - private Value start, stop; - - @Override - public void convert(String ttl, String startTime) { - long start_l, stop_l; - long ttl_l = Long.parseLong(ttl); - stop_l = System.currentTimeMillis(); - if (startTime != null) - stop_l = Long.parseLong(startTime); - start_l = stop_l - ttl_l; - - start = vf.createLiteral(start_l + ""); - stop = vf.createLiteral(stop_l + ""); - } - - @Override - public Value getStart() { - return start; - } - - @Override - public Value getStop() { - return stop; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java deleted file mode 100644 index 75366dc08..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java +++ /dev/null @@ -1,56 +0,0 @@ -package mvm.rya.api.date; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.Value; - -/** - * Class TimestampTtlValueConverter - * @deprecated - */ -public class TimestampTtlValueConverter implements TtlValueConverter { - - private Value start, stop; - - @Override - public void convert(String ttl, String startTime) { - long start_l, stop_l; - long ttl_l = Long.parseLong(ttl); - stop_l = System.currentTimeMillis(); - if (startTime != null) - stop_l = Long.parseLong(startTime); - start_l = stop_l - ttl_l; - - start = vf.createLiteral(start_l); - stop = vf.createLiteral(stop_l); - } - - @Override - public Value getStart() { - return start; - } - - @Override - public Value getStop() { - return stop; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java deleted file mode 100644 index 1ba984130..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java +++ /dev/null @@ -1,41 +0,0 @@ -package mvm.rya.api.date; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-/**
- * Class TtlValueConverter
- * @deprecated
- */
-public interface TtlValueConverter {
-
-    ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    public void convert(String ttl, String startTime);
-
-    public Value getStart();
-
-    public Value getStop();
-}
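All three TTL converters removed by this patch share the contract above: convert(ttl, startTime) fixes a window that ends at startTime (or now, when startTime is null) and reaches ttl milliseconds back, exposed via getStart()/getStop(). A minimal sketch with the timestamp flavor deleted earlier in this hunk (TtlDemo is a hypothetical harness):

    import mvm.rya.api.date.TimestampTtlValueConverter;
    import org.openrdf.model.Value;

    public class TtlDemo {
        public static void main(String[] args) {
            TimestampTtlValueConverter conv = new TimestampTtlValueConverter();
            conv.convert("60000", "1000000"); // 60s window ending at t=1,000,000 ms
            Value start = conv.getStart();    // literal for 940000
            Value stop = conv.getStop();      // literal for 1000000
            System.out.println(start + " .. " + stop);
        }
    }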
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java b/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
deleted file mode 100644
index f5ca08cd1..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.impl.URIImpl;
-
-/**
- * A Node is an expected node in the global graph. This typing of the URI allows us to dictate the difference between a
- * URI that is just an Attribute on the subject vs. a URI that is another subject Node in the global graph. It does not
- * guarantee that the subject exists, just that there is an Edge to it.
- */
-public class Node extends URIImpl {
-    public Node() {
-    }
-
-    public Node(String uriString) {
-        super(uriString);
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
deleted file mode 100644
index 67d574295..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/11/12
- * Time: 1:03 PM
- * To change this template use File | Settings | File Templates.
- */
-public class RangeURI extends RangeValue<URI> implements URI {
-
-    public RangeURI(URI start, URI end) {
-        super(start, end);
-    }
-
-    public RangeURI(RangeValue rangeValue) {
-        super((URI) rangeValue.getStart(), (URI) rangeValue.getEnd());
-    }
-
-    @Override
-    public String getNamespace() {
-        throw new UnsupportedOperationException("Ranges do not have a namespace");
-    }
-
-    @Override
-    public String getLocalName() {
-        throw new UnsupportedOperationException("Ranges do not have a localname");
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
deleted file mode 100644
index c27edfd37..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.Value;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/10/12
- * Time: 3:57 PM
- * To change this template use File | Settings | File Templates.
- */
-public class RangeValue<T extends Value> implements Value {
-
-    private T start;
-    private T end;
-
-    public RangeValue(T start, T end) {
-        this.start = start;
-        this.end = end;
-    }
-
-    @Override
-    public String stringValue() {
-        throw new UnsupportedOperationException("Range is only supported at query time");
-    }
-
-    public T getStart() {
-        return start;
-    }
-
-    public void setStart(T start) {
-        this.start = start;
-    }
-
-    public T getEnd() {
-        return end;
-    }
-
-    public void setEnd(T end) {
-        this.end = end;
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder sb = new StringBuilder();
-        sb.append("RangeValue");
-        sb.append("{start=").append(start);
-        sb.append(", end=").append(end);
-        sb.append('}');
-        return sb.toString();
-    }
-}
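Note that RangeValue.stringValue() throws: ranges exist only to carry query-time bounds, never to be serialized as RDF terms. A hedged sketch of building a URI range with the classes above (RangeDemo is a hypothetical name; the printed form follows from the toString above):

    import mvm.rya.api.domain.RangeURI;
    import org.openrdf.model.URI;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.model.impl.ValueFactoryImpl;

    public class RangeDemo {
        public static void main(String[] args) {
            ValueFactory vf = ValueFactoryImpl.getInstance();
            URI start = vf.createURI("urn:test#a");
            URI end = vf.createURI("urn:test#z");
            RangeURI range = new RangeURI(start, end);
            System.out.println(range); // RangeValue{start=urn:test#a, end=urn:test#z}
        }
    }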
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
deleted file mode 100644
index e99f451d4..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-/**
- * Date: 7/17/12
- * Time: 10:02 AM
- */
-public interface RyaRange {
-    public RyaType getStart();
-
-    public RyaType getStop();
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
deleted file mode 100644
index 6744d209d..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import org.openrdf.model.URI;
-
-/**
- * Date: 7/16/12
- * Time: 11:59 AM
- */
-public class RyaSchema {
-
-    public static final String NAMESPACE = "urn:mvm.rya/2012/05#";
-    public static final String AUTH_NAMESPACE = "urn:mvm.rya/auth/2012/05#";
-    public static final String BNODE_NAMESPACE = "urn:mvm.rya/bnode/2012/07#";
-
-    //datatypes
-    public static final URI NODE = RdfCloudTripleStoreConstants.VALUE_FACTORY.createURI(NAMESPACE, "node");
-    public static final URI LANGUAGE = RdfCloudTripleStoreConstants.VALUE_FACTORY.createURI(NAMESPACE, "lang");
-
-    //functions
-    public static final URI RANGE = RdfCloudTripleStoreConstants.VALUE_FACTORY.createURI(NAMESPACE, "range");
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
deleted file mode 100644
index 18bde98cf..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
+++ /dev/null
@@ -1,252 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Arrays; - -/** - * Date: 7/17/12 - * Time: 7:20 AM - */ -public class RyaStatement { - private RyaURI subject; - private RyaURI predicate; - private RyaType object; - private RyaURI context; - private String qualifer; - private byte[] columnVisibility; - private byte[] value; - private Long timestamp; - - public RyaStatement() { - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object) { - this(subject, predicate, object, null); - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - this(subject, predicate, object, context, null); - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier) { - this(subject, predicate, object, context, qualifier, null); - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, byte[] columnVisibility) { - this(subject, predicate, object, context, qualifier, columnVisibility, null); - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, byte[] columnVisibility, byte[] value) { - this(subject, predicate, object, context, qualifier, columnVisibility, value, null); - } - - public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, byte[] columnVisibility, byte[] value, Long timestamp) { - this.subject = subject; - this.predicate = predicate; - this.object = object; - this.context = context; - this.qualifer = qualifier; - this.columnVisibility = columnVisibility; - this.value = value; - this.timestamp = timestamp != null ? timestamp : System.currentTimeMillis(); - } - - public RyaURI getSubject() { - return subject; - } - - public void setSubject(RyaURI subject) { - this.subject = subject; - } - - public RyaURI getPredicate() { - return predicate; - } - - public void setPredicate(RyaURI predicate) { - this.predicate = predicate; - } - - public RyaType getObject() { - return object; - } - - public void setObject(RyaType object) { - this.object = object; - } - - public RyaURI getContext() { - return context; - } - - public void setContext(RyaURI context) { - this.context = context; - } - - public byte[] getColumnVisibility() { - return columnVisibility; - } - - public void setColumnVisibility(byte[] columnVisibility) { - this.columnVisibility = columnVisibility; - } - - public byte[] getValue() { - return value; - } - - public void setValue(byte[] value) { - this.value = value; - } - - public Long getTimestamp() { - return timestamp; - } - - public void setTimestamp(Long timestamp) { - this.timestamp = timestamp; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - RyaStatement that = (RyaStatement) o; - - if (!Arrays.equals(columnVisibility, that.columnVisibility)) return false; - if (context != null ? !context.equals(that.context) : that.context != null) return false; - if (object != null ? 
- !object.equals(that.object) : that.object != null) return false;
-        if (predicate != null ? !predicate.equals(that.predicate) : that.predicate != null) return false;
-        if (qualifer != null ? !qualifer.equals(that.qualifer) : that.qualifer != null) return false;
-        if (subject != null ? !subject.equals(that.subject) : that.subject != null) return false;
-        if (timestamp != null ? !timestamp.equals(that.timestamp) : that.timestamp != null) return false;
-        if (!Arrays.equals(value, that.value)) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = subject != null ? subject.hashCode() : 0;
-        result = 31 * result + (predicate != null ? predicate.hashCode() : 0);
-        result = 31 * result + (object != null ? object.hashCode() : 0);
-        result = 31 * result + (context != null ? context.hashCode() : 0);
-        result = 31 * result + (qualifer != null ? qualifer.hashCode() : 0);
-        result = 31 * result + (columnVisibility != null ? Arrays.hashCode(columnVisibility) : 0);
-        result = 31 * result + (value != null ? Arrays.hashCode(value) : 0);
-        result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0);
-        return result;
-    }
-
-    public String getQualifer() {
-        return qualifer;
-    }
-
-    public void setQualifer(String qualifer) {
-        this.qualifer = qualifer;
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder sb = new StringBuilder();
-        sb.append("RyaStatement");
-        sb.append("{subject=").append(subject);
-        sb.append(", predicate=").append(predicate);
-        sb.append(", object=").append(object);
-        sb.append(", context=").append(context);
-        sb.append(", qualifier=").append(qualifer);
-        sb.append(", columnVisibility=").append(columnVisibility == null ? "null" : new String(columnVisibility));
-        sb.append(", value=").append(value == null ? "null" : new String(value));
-        sb.append(", timestamp=").append(timestamp);
-        sb.append('}');
-        return sb.toString();
-    }
"null" : new String(value)); - sb.append(", timestamp=").append(timestamp); - sb.append('}'); - return sb.toString(); - } - - public static RyaStatementBuilder builder() { - return new RyaStatementBuilder(); - } - - public static RyaStatementBuilder builder(RyaStatement ryaStatement) { - return new RyaStatementBuilder(ryaStatement); - } - - - //builder - public static class RyaStatementBuilder { - - RyaStatement ryaStatement; - - public RyaStatementBuilder() { - ryaStatement = new RyaStatement(); - } - - public RyaStatementBuilder(RyaStatement ryaStatement) { - this.ryaStatement = ryaStatement; - } - - public RyaStatementBuilder setTimestamp(Long timestamp) { - ryaStatement.setTimestamp(timestamp); - return this; - } - - public RyaStatementBuilder setValue(byte[] value) { - ryaStatement.setValue(value); - return this; - } - - public RyaStatementBuilder setColumnVisibility(byte[] columnVisibility) { - ryaStatement.setColumnVisibility(columnVisibility); - return this; - } - - public RyaStatementBuilder setQualifier(String str) { - ryaStatement.setQualifer(str); - return this; - } - - public RyaStatementBuilder setContext(RyaURI ryaURI) { - ryaStatement.setContext(ryaURI); - return this; - } - - public RyaStatementBuilder setSubject(RyaURI ryaURI) { - ryaStatement.setSubject(ryaURI); - return this; - } - - public RyaStatementBuilder setPredicate(RyaURI ryaURI) { - ryaStatement.setPredicate(ryaURI); - return this; - } - - public RyaStatementBuilder setObject(RyaType ryaType) { - ryaStatement.setObject(ryaType); - return this; - } - - public RyaStatement build() { - return ryaStatement; - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java deleted file mode 100644 index ab580d628..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java +++ /dev/null @@ -1,111 +0,0 @@ -package mvm.rya.api.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
deleted file mode 100644
index ab580d628..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
+++ /dev/null
@@ -1,111 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.XMLSchema;
-
-/**
- * Base Rya Type
- * Date: 7/16/12
- * Time: 11:45 AM
- */
-public class RyaType implements Comparable {
-
-    private URI dataType;
-    private String data;
-
-    public RyaType() {
-        setDataType(XMLSchema.STRING);
-    }
-
-    public RyaType(String data) {
-        this(XMLSchema.STRING, data);
-    }
-
-
-    public RyaType(URI dataType, String data) {
-        setDataType(dataType);
-        setData(data);
-    }
-
-    /**
-     * TODO: Can we get away without using the openrdf URI
-     *
-     * @return
-     */
-    public URI getDataType() {
-        return dataType;
-    }
-
-    public String getData() {
-        return data;
-    }
-
-    public void setDataType(URI dataType) {
-        this.dataType = dataType;
-    }
-
-    public void setData(String data) {
-        this.data = data;
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder sb = new StringBuilder();
-        sb.append("RyaType");
-        sb.append("{dataType=").append(dataType);
-        sb.append(", data='").append(data).append('\'');
-        sb.append('}');
-        return sb.toString();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        RyaType ryaType = (RyaType) o;
-
-        if (data != null ? !data.equals(ryaType.data) : ryaType.data != null) return false;
-        if (dataType != null ? !dataType.equals(ryaType.dataType) : ryaType.dataType != null) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = dataType != null ? dataType.hashCode() : 0;
-        result = 31 * result + (data != null ? data.hashCode() : 0);
-        return result;
-    }
-
-    @Override
-    public int compareTo(Object o) {
-        if (o != null && this.getClass().isInstance(o)) {
-            RyaType other = (RyaType) o;
-            return this.getData().compareTo(other.getData());
-        }
-        return -1;
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
deleted file mode 100644
index d3944174b..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - - - -import org.openrdf.model.URI; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM; -import static mvm.rya.api.RdfCloudTripleStoreConstants.LAST; - -/** - * Date: 7/24/12 - * Time: 3:26 PM - */ -public class RyaTypePrefix extends RyaTypeRange { - - public RyaTypePrefix(URI datatype, String prefix) { - super(); - setPrefix(datatype, prefix); - } - - public RyaTypePrefix(String prefix) { - super(); - setPrefix(prefix); - } - - public void setPrefix(String prefix) { - setStart(new RyaType(prefix + DELIM)); - setStop(new RyaType(prefix + LAST)); - } - - public void setPrefix(URI datatype, String prefix) { - setStart(new RyaType(datatype, prefix + DELIM)); - setStop(new RyaType(datatype, prefix + LAST)); - } - - public String getPrefix() { - String data = getStart().getData(); - return data.substring(0, data.length() - 1); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java deleted file mode 100644 index a7443990e..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java +++ /dev/null @@ -1,99 +0,0 @@ -package mvm.rya.api.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.URI; - -/** - * Date: 7/17/12 - * Time: 9:53 AM - */ -public class RyaTypeRange extends RyaType implements RyaRange { - private RyaType start; - private RyaType stop; - - public RyaTypeRange() { - } - - public RyaTypeRange(RyaType start, RyaType stop) { - this.start = start; - this.stop = stop; - } - - public RyaType getStart() { - return start; - } - - public void setStart(RyaType start) { - this.start = start; - } - - public RyaType getStop() { - return stop; - } - - public void setStop(RyaType stop) { - this.stop = stop; - } - - @Override - public URI getDataType() { - return start.getDataType(); - } - - @Override - public String getData() { - throw new UnsupportedOperationException(); - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append("RyaTypeRange"); - sb.append("{start=").append(start); - sb.append(", stop=").append(stop); - sb.append('}'); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - - RyaTypeRange that = (RyaTypeRange) o; - - if (start != null ? !start.equals(that.start) : that.start != null) return false; - if (stop != null ? !stop.equals(that.stop) : that.stop != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + (start != null ? 
start.hashCode() : 0); - result = 31 * result + (stop != null ? stop.hashCode() : 0); - return result; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java deleted file mode 100644 index aa174c52d..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java +++ /dev/null @@ -1,63 +0,0 @@ -package mvm.rya.api.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.URI; -import org.openrdf.model.util.URIUtil; -import org.openrdf.model.vocabulary.XMLSchema; - - -/** - * Date: 7/16/12 - * Time: 11:56 AM - */ -public class RyaURI extends RyaType { - - public RyaURI() { - setDataType(XMLSchema.ANYURI); - } - - public RyaURI(String data) { - super(XMLSchema.ANYURI, data); - } - - public RyaURI(String namespace, String data) { - super(XMLSchema.ANYURI, namespace + data); - } - - protected RyaURI(URI datatype, String data) { - super(datatype, data); - } - - @Override - public void setData(String data) { - super.setData(data); - validate(data); - } - - protected void validate(String data) { - if (data == null) - throw new IllegalArgumentException("Null not URI"); - URIUtil.getLocalNameIndex(data); - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java deleted file mode 100644 index f80860751..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java +++ /dev/null @@ -1,47 +0,0 @@ -package mvm.rya.api.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConstants; - -/** - * Date: 7/24/12 - * Time: 3:26 PM - */ -public class RyaURIPrefix extends RyaURIRange { - public static final String LAST = "\u00FF"; - - public RyaURIPrefix(String prefix) { - super(); - setPrefix(prefix); - } - - public void setPrefix(String prefix) { - setStart(new RyaURI(prefix + RdfCloudTripleStoreConstants.DELIM)); - setStop(new RyaURI(prefix + LAST)); - } - - public String getPrefix() { - String data = getStart().getData(); - return data.substring(0, data.length() - 1); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java deleted file mode 100644 index 2c2b83618..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java +++ /dev/null @@ -1,95 +0,0 @@ -package mvm.rya.api.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Date: 7/17/12 - * Time: 9:59 AM - */ -public class RyaURIRange extends RyaURI implements RyaRange { - public static final RyaURI LAST_URI = new RyaURI(((char) 255) + ":#" + ((char) 255)); - - private RyaURI start; - private RyaURI stop; - - public RyaURIRange() { - super(); - } - - public RyaURIRange(RyaURI start, RyaURI stop) { - this.start = start; - this.stop = stop; - } - - public RyaURI getStart() { - return start; - } - - public void setStart(RyaURI start) { - this.start = start; - } - - public RyaURI getStop() { - return stop; - } - - public void setStop(RyaURI stop) { - this.stop = stop; - } - - @Override - public String getData() { - throw new UnsupportedOperationException(); - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append("RyaURIRange"); - sb.append("{start=").append(start); - sb.append(", stop=").append(stop); - sb.append('}'); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - - RyaURIRange that = (RyaURIRange) o; - - if (start != null ? !start.equals(that.start) : that.start != null) return false; - if (stop != null ? !stop.equals(that.stop) : that.stop != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + (start != null ? start.hashCode() : 0); - result = 31 * result + (stop != null ? 
stop.hashCode() : 0); - return result; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java b/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java deleted file mode 100644 index 13d82da94..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java +++ /dev/null @@ -1,140 +0,0 @@ -package mvm.rya.api.domain.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.hadoop.io.WritableComparable; - -/** - * Date: 7/17/12 - * Time: 1:29 PM - */ -public class RyaStatementWritable implements WritableComparable { - - private RyaTripleContext ryaContext; - private RyaStatement ryaStatement; - - - public RyaStatementWritable(RyaTripleContext ryaContext) { - this.ryaContext = ryaContext; - } - - public RyaStatementWritable(RyaStatement ryaStatement, RyaTripleContext ryaContext) { - this(ryaContext); - this.ryaStatement = ryaStatement; - } - - public RyaStatement getRyaStatement() { - return ryaStatement; - } - - public void setRyaStatement(RyaStatement ryaStatement) { - this.ryaStatement = ryaStatement; - } - - @Override - public int compareTo(Object o) { - if (o instanceof RyaStatementWritable) { - return (getRyaStatement().equals(((RyaStatementWritable) o).getRyaStatement())) ? 
(0) : (-1); - } - return -1; - } - - @Override - public void write(DataOutput dataOutput) throws IOException { - if (ryaStatement == null) { - throw new IOException("Rya Statement is null"); - } - try { - Map map = ryaContext.serializeTriple(ryaStatement); - TripleRow tripleRow = map.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - byte[] row = tripleRow.getRow(); - byte[] columnFamily = tripleRow.getColumnFamily(); - byte[] columnQualifier = tripleRow.getColumnQualifier(); - write(dataOutput, row); - write(dataOutput, columnFamily); - write(dataOutput, columnQualifier); - write(dataOutput, ryaStatement.getColumnVisibility()); - write(dataOutput, ryaStatement.getValue()); - Long timestamp = ryaStatement.getTimestamp(); - boolean b = timestamp != null; - if (b) { - dataOutput.writeBoolean(b); - dataOutput.writeLong(timestamp); - } - } catch (TripleRowResolverException e) { - throw new IOException(e); - } - } - - protected void write(DataOutput dataOutput, byte[] row) throws IOException { - boolean b = row != null; - dataOutput.writeBoolean(b); - if (b) { - dataOutput.writeInt(row.length); - dataOutput.write(row); - } - } - - protected byte[] read(DataInput dataInput) throws IOException { - if (dataInput.readBoolean()) { - int len = dataInput.readInt(); - byte[] bytes = new byte[len]; - dataInput.readFully(bytes); - return bytes; - } - return null; - } - - @Override - public void readFields(DataInput dataInput) throws IOException { - byte[] row = read(dataInput); - byte[] columnFamily = read(dataInput); - byte[] columnQualifier = read(dataInput); - byte[] columnVisibility = read(dataInput); - byte[] value = read(dataInput); - boolean b = dataInput.readBoolean(); - Long timestamp = null; - if (b) { - timestamp = dataInput.readLong(); - } - try { - ryaStatement = ryaContext.deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, - new TripleRow(row, columnFamily, columnQualifier)); - ryaStatement.setColumnVisibility(columnVisibility); - ryaStatement.setValue(value); - ryaStatement.setTimestamp(timestamp); - } catch (TripleRowResolverException e) { - throw new IOException(e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java deleted file mode 100644 index 61732d36d..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java +++ /dev/null @@ -1,40 +0,0 @@ -package mvm.rya.api.layout; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Created by IntelliJ IDEA. - * Date: 4/25/12 - * Time: 12:20 PM - * To change this template use File | Settings | File Templates. 
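[Note on RyaStatementWritable above: it makes a RyaStatement usable as a Hadoop value type by serializing the SPO-layout TripleRow plus visibility, value, and timestamp, while compareTo only distinguishes equal from not-equal, so it orders poorly as a key. One caveat visible in the removed code: write() emits the timestamp presence flag inside the null check, but readFields() always reads one, so a statement whose timestamp is genuinely null will not round-trip cleanly. A round-trip sketch, assuming RyaTripleContext.getInstance(conf) is the usual way to obtain the triple context:

    import java.io.*;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.domain.*;
    import mvm.rya.api.domain.utils.RyaStatementWritable;
    import mvm.rya.api.resolver.RyaTripleContext;

    class WritableRoundTripSketch {
        // conf is an assumed, already-populated store configuration
        static RyaStatement roundTrip(RdfCloudTripleStoreConfiguration conf) throws IOException {
            RyaTripleContext ctx = RyaTripleContext.getInstance(conf);
            RyaStatement stmt = new RyaStatement(
                    new RyaURI("urn:example:s"), new RyaURI("urn:example:p"), new RyaType("o"));

            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            new RyaStatementWritable(stmt, ctx).write(new DataOutputStream(buf));

            RyaStatementWritable readBack = new RyaStatementWritable(ctx);
            readBack.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
            return readBack.getRyaStatement();  // SPO fields, visibility, value, timestamp restored
        }
    }
]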
- */ -public interface TableLayoutStrategy { - - public String getSpo(); - public String getPo(); - public String getOsp(); - public String getNs(); - public String getEval(); - public String getProspects(); - public String getSelectivity(); - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java deleted file mode 100644 index 0e995ab3c..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java +++ /dev/null @@ -1,85 +0,0 @@ -package mvm.rya.api.layout; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConstants; - -/** - * Created by IntelliJ IDEA. - * Date: 4/25/12 - * Time: 12:20 PM - * To change this template use File | Settings | File Templates. - */ -public class TablePrefixLayoutStrategy implements TableLayoutStrategy{ - private String tablePrefix = RdfCloudTripleStoreConstants.TBL_PRFX_DEF; - - public TablePrefixLayoutStrategy() { - } - - public TablePrefixLayoutStrategy(String tablePrefix) { - this.tablePrefix = tablePrefix; - } - - @Override - public String getSpo() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX; - } - - @Override - public String getPo() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX; - } - - @Override - public String getOsp() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX; - } - - @Override - public String getNs() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX; - } - - @Override - public String getEval() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX; - } - - @Override - public String getProspects() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_STATS_SUFFIX; - } - - @Override - public String getSelectivity() { - return tablePrefix + RdfCloudTripleStoreConstants.TBL_SEL_SUFFIX; - } - - - public String getTablePrefix() { - return tablePrefix; - } - - public void setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java deleted file mode 100644 index 54444d4f3..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java +++ /dev/null @@ -1,44 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
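[Note on the layout classes above: TableLayoutStrategy fixes the set of logical tables (SPO, PO, OSP, namespaces, eval stats, prospects, selectivity), and TablePrefixLayoutStrategy maps them to physical names by prepending a single configurable prefix, so pointing Rya at a differently named table set is one constructor argument. A small sketch; the printed values are the prefix plus whichever suffix constants RdfCloudTripleStoreConstants defines:

    import mvm.rya.api.layout.TableLayoutStrategy;
    import mvm.rya.api.layout.TablePrefixLayoutStrategy;

    class LayoutSketch {
        static void show() {
            TableLayoutStrategy layout = new TablePrefixLayoutStrategy("triplestore_");
            System.out.println(layout.getSpo());   // "triplestore_" + TBL_SPO_SUFFIX
            System.out.println(layout.getNs());    // "triplestore_" + TBL_NS_SUFFIX
            System.out.println(layout.getEval());  // "triplestore_" + TBL_EVAL_SUFFIX
        }
    }
]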
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Class RdfDAOException - * Date: Feb 28, 2012 - * Time: 3:39:36 PM - */ -public class RdfDAOException extends RuntimeException { - public RdfDAOException() { - } - - public RdfDAOException(String s) { - super(s); - } - - public RdfDAOException(String s, Throwable throwable) { - super(s, throwable); - } - - public RdfDAOException(Throwable throwable) { - super(throwable); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java deleted file mode 100644 index 020464bf1..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java +++ /dev/null @@ -1,54 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.List; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; - -import org.openrdf.model.Resource; -import org.openrdf.model.Value; - -/** - * Class RdfEvalStatsDAO - * Date: Feb 28, 2012 - * Time: 4:17:05 PM - */ -public interface RdfEvalStatsDAO { - public enum CARDINALITY_OF { - SUBJECT, PREDICATE, OBJECT, SUBJECTPREDICATE, SUBJECTOBJECT, PREDICATEOBJECT - } - - public void init() throws RdfDAOException; - - public boolean isInitialized() throws RdfDAOException; - - public void destroy() throws RdfDAOException; - - public double getCardinality(C conf, CARDINALITY_OF card, List val) throws RdfDAOException; - public double getCardinality(C conf, CARDINALITY_OF card, List val, Resource context) throws RdfDAOException; - - public void setConf(C conf); - - public C getConf(); - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java deleted file mode 100644 index 00c246eeb..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java +++ /dev/null @@ -1,35 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
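[Note on RdfEvalStatsDAO above: its CARDINALITY_OF enum names which triple-slot combination a statistic covers, and the two getCardinality overloads differ only by an optional context. A hedged caller sketch; statsDao, conf, and the OpenRDF values are placeholders supplied by whichever implementation backs the interface, and the type parameters elided in the interface text are left raw here:

    import java.util.Arrays;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.persist.RdfEvalStatsDAO;
    import org.openrdf.model.Resource;
    import org.openrdf.model.Value;

    class StatsSketch {
        // statsDao, conf, and the values/context are assumed inputs
        static void lookup(RdfEvalStatsDAO statsDao, RdfCloudTripleStoreConfiguration conf,
                           Value subj, Value pred, Value obj, Resource ctx) {
            double subjects = statsDao.getCardinality(conf,
                    RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT, Arrays.asList(subj));
            double predObj = statsDao.getCardinality(conf,
                    RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT,
                    Arrays.asList(pred, obj), ctx);
            System.out.println(subjects + " " + predObj);
        }
    }
]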
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; - -/** - * Date: 7/17/12 - * Time: 8:24 AM - */ -public interface RyaConfigured { - - public void setConf(C conf); - - public C getConf(); -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java deleted file mode 100644 index e326f7de3..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java +++ /dev/null @@ -1,126 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Iterator; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.query.RyaQueryEngine; - -/** - * Provides the access layer to the Rya triple store. - * - * Date: Feb 28, 2012 - * Time: 3:30:14 PM - */ -public interface RyaDAO extends RyaConfigured { - - /** - * Initialize the RyaDAO. Should only be called once, otherwise, if already initialized, it will - * throw an exception. - * - * @throws RyaDAOException - */ - public void init() throws RyaDAOException; - - /** - * - * @return true if the store is already initiailized - * @throws RyaDAOException - */ - public boolean isInitialized() throws RyaDAOException; - - /** - * Shutdown the store. To reinitialize, call the init() method. - * - * @throws RyaDAOException - */ - public void destroy() throws RyaDAOException; - - /** - * Add and commit a single RyaStatement - * - * @param statement - * @throws RyaDAOException - */ - public void add(RyaStatement statement) throws RyaDAOException; - - /** - * Add and commit a collection of RyaStatements - * - * @param statement - * @throws RyaDAOException - */ - public void add(Iterator statement) throws RyaDAOException; - - /** - * Delete a RyaStatement. The Configuration should provide the auths to perform the delete - * - * @param statement - * @param conf - * @throws RyaDAOException - */ - public void delete(RyaStatement statement, C conf) throws RyaDAOException; - - /** - * Drop a set of Graphs. 
The Configuration should provide the auths to perform the delete - * - * @param conf - * @throws RyaDAOException - */ - public void dropGraph(C conf, RyaURI... graphs) throws RyaDAOException; - - /** - * Delete a collection of RyaStatements. - * - * @param statements - * @param conf - * @throws RyaDAOException - */ - public void delete(Iterator statements, C conf) throws RyaDAOException; - - /** - * Get the version of the store. - * - * @return - * @throws RyaDAOException - */ - public String getVersion() throws RyaDAOException; - - /** - * Get the Rya query engine - * @return - */ - public RyaQueryEngine getQueryEngine(); - - /** - * Get the Rya Namespace Manager - * @return - */ - public RyaNamespaceManager getNamespaceManager(); - - public void purge(RdfCloudTripleStoreConfiguration configuration); - - public void dropAndDestroy() throws RyaDAOException; -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java deleted file mode 100644 index 232211931..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java +++ /dev/null @@ -1,43 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Date: 7/17/12 - * Time: 8:20 AM - */ -public class RyaDAOException extends Exception { - public RyaDAOException() { - } - - public RyaDAOException(String s) { - super(s); - } - - public RyaDAOException(String s, Throwable throwable) { - super(s, throwable); - } - - public RyaDAOException(Throwable throwable) { - super(throwable); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java deleted file mode 100644 index 77cd4bdb6..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java +++ /dev/null @@ -1,41 +0,0 @@ -package mvm.rya.api.persist; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
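[Note on RyaDAO above: taken together it is the whole persistence lifecycle: setConf, a one-time init(), add/delete, and destroy(), with reads delegated to the RyaQueryEngine it hands out. A minimal usage sketch; the concrete DAO (for example the AccumuloRyaDAO removed elsewhere in this patch), its configuration, and the statement to delete are assumed inputs:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.RyaDAO;
    import mvm.rya.api.persist.RyaDAOException;

    class DaoLifecycleSketch {
        // dao, conf, and stale are assumed inputs
        static void lifecycle(RyaDAO dao, RdfCloudTripleStoreConfiguration conf,
                              RyaStatement stale) throws RyaDAOException {
            dao.setConf(conf);
            dao.init();                      // throws if already initialized
            dao.add(new RyaStatement(
                    new RyaURI("urn:example:alice"),
                    new RyaURI("http://xmlns.com/foaf/0.1/knows"),
                    new RyaURI("urn:example:bob")));
            dao.delete(stale, conf);         // conf supplies the auths for the delete
            dao.destroy();                   // call init() again to reuse
        }
    }
]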
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.Namespace; - -/** - * Date: 7/17/12 - * Time: 8:23 AM - */ -public interface RyaNamespaceManager extends RyaConfigured { - - public void addNamespace(String pfx, String namespace) throws RyaDAOException; - - public String getNamespace(String pfx) throws RyaDAOException; - - public void removeNamespace(String pfx) throws RyaDAOException; - - public CloseableIteration iterateNamespace() throws RyaDAOException; -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java b/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java deleted file mode 100644 index 8c827c1ee..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java +++ /dev/null @@ -1,45 +0,0 @@ -package mvm.rya.api.persist.index; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.Closeable; -import java.io.Flushable; -import java.io.IOException; -import java.util.Collection; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; - -import org.apache.hadoop.conf.Configurable; - -public interface RyaSecondaryIndexer extends Closeable, Flushable, Configurable { - - public String getTableName(); - - public void storeStatements(Collection statements) throws IOException; - - public void storeStatement(RyaStatement statement) throws IOException; - - public void deleteStatement(RyaStatement stmt) throws IOException; - - public void dropGraph(RyaURI... graphs); - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java deleted file mode 100644 index 28f797be8..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java +++ /dev/null @@ -1,37 +0,0 @@ -package mvm.rya.api.persist.joinselect; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
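[Note on RyaNamespaceManager above: it keeps the prefix-to-namespace table and its iterator follows the same CloseableIteration idiom as the query paths, so it must be closed. A sketch, assuming the iteration yields OpenRDF Namespace objects as the import list suggests:

    import info.aduna.iteration.CloseableIteration;
    import mvm.rya.api.persist.RyaDAOException;
    import mvm.rya.api.persist.RyaNamespaceManager;
    import org.openrdf.model.Namespace;

    class NamespaceSketch {
        // nsManager is assumed to come from RyaDAO.getNamespaceManager()
        static void list(RyaNamespaceManager nsManager) throws RyaDAOException {
            nsManager.addNamespace("foaf", "http://xmlns.com/foaf/0.1/");
            String uri = nsManager.getNamespace("foaf");   // "http://xmlns.com/foaf/0.1/"
            CloseableIteration<? extends Namespace, RyaDAOException> it =
                    nsManager.iterateNamespace();
            try {
                while (it.hasNext()) {
                    Namespace ns = it.next();
                    System.out.println(ns.getPrefix() + " -> " + ns.getName());
                }
            } finally {
                it.close();
            }
        }
    }
]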
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; - -public interface SelectivityEvalDAO extends RdfEvalStatsDAO { - - public double getJoinSelect(C conf, TupleExpr te1, TupleExpr te2) throws Exception; - - public long getCardinality(C conf, StatementPattern sp) throws Exception; - - public int getTableSize(C conf) throws Exception; - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java deleted file mode 100644 index 113ce5163..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java +++ /dev/null @@ -1,115 +0,0 @@ -package mvm.rya.api.persist.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
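[Note on SelectivityEvalDAO above: it layers the three numbers a cost-based planner needs (pairwise join selectivity, per-pattern cardinality, overall table size) on top of the plain statistics interface. A hedged sketch of how a planner might consume it; selDao, conf, the algebra nodes, and the cost formula at the end are all assumptions, not something the interface prescribes:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.TupleExpr;

    class PlannerSketch {
        // selDao, conf, and the algebra nodes are assumed inputs
        static double cost(SelectivityEvalDAO selDao, RdfCloudTripleStoreConfiguration conf,
                           TupleExpr left, TupleExpr right, StatementPattern sp) throws Exception {
            double sel = selDao.getJoinSelect(conf, left, right);
            long card = selDao.getCardinality(conf, sp);
            int size = selDao.getTableSize(conf);
            return sel * size + card;   // one plausible cost term, not prescribed by the interface
        }
    }
]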
- */ - - - -import com.google.common.base.Preconditions; -import com.google.common.collect.Iterables; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; - -/** - * Query domain object contains the query to run as a {@link mvm.rya.api.domain.RyaStatement} and options for running the query - */ -public class BatchRyaQuery extends RyaQueryOptions { - - //queries - private Iterable queries; - - //maximum number of ranges before we use a batchScanner - private int maxRanges = 2; - - public BatchRyaQuery(Iterable queries) { - Preconditions.checkNotNull(queries, "RyaStatement queries cannot be null"); - this.queries = queries; - } - - public static RyaBatchQueryBuilder builder(Iterable queries) { - return new RyaBatchQueryBuilder(queries); - } - - public static class RyaBatchQueryBuilder extends RyaOptionsBuilder { - private BatchRyaQuery ryaQuery; - - public RyaBatchQueryBuilder(Iterable queries) { - this(new BatchRyaQuery(queries)); - } - - public RyaBatchQueryBuilder(BatchRyaQuery query) { - super(query); - this.ryaQuery = query; - } - - public RyaBatchQueryBuilder setMaxRanges(int maxRanges) { - ryaQuery.setMaxRanges(maxRanges); - return this; - } - - public BatchRyaQuery build() { - return ryaQuery; - } - } - - public Iterable getQueries() { - return queries; - } - - public void setQueries(Iterable queries) { - this.queries = queries; - } - - public int getMaxRanges() { - return maxRanges; - } - - public void setMaxRanges(int maxRanges) { - this.maxRanges = maxRanges; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - - BatchRyaQuery that = (BatchRyaQuery) o; - - if (queries != null ? !queries.equals(that.queries) : that.queries != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + (queries != null ? queries.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return "BatchRyaQuery{" + - "queries=" + Iterables.toString(queries) + - "options={" + super.toString() + - '}' + - '}'; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java deleted file mode 100644 index 5235989c1..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java +++ /dev/null @@ -1,97 +0,0 @@ -package mvm.rya.api.persist.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
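[Note on BatchRyaQuery above: it bundles several RyaStatement patterns with shared query options, and maxRanges (default 2) is the point past which an implementation is expected to switch to a batch scanner. Builder usage mirroring the fluent setters in the removed code; the patterns and auth label are placeholders:

    import java.util.Arrays;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.query.BatchRyaQuery;

    class BatchQuerySketch {
        static BatchRyaQuery build() {
            return BatchRyaQuery.builder(Arrays.asList(
                        new RyaStatement(new RyaURI("urn:example:s1"), null, null),
                        new RyaStatement(new RyaURI("urn:example:s2"), null, null)))
                    .setMaxRanges(4)                // up to 4 ranges before a batch scanner
                    .setAuths(new String[]{"U"})    // fluent setters inherited from RyaOptionsBuilder
                    .build();
        }
    }
]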
- */ - - - -import com.google.common.base.Preconditions; -import mvm.rya.api.domain.RyaStatement; - -/** - * Query domain object contains the query to run as a {@link RyaStatement} and options for running the query - */ -public class RyaQuery extends RyaQueryOptions { - - //query - private RyaStatement query; - - public RyaQuery(RyaStatement query) { - Preconditions.checkNotNull(query, "RyaStatement query cannot be null"); - this.query = query; - } - - public static RyaQueryBuilder builder(RyaStatement query) { - return new RyaQueryBuilder(query); - } - - public static class RyaQueryBuilder extends RyaOptionsBuilder { - private RyaQuery ryaQuery; - - public RyaQueryBuilder(RyaStatement query) { - this(new RyaQuery(query)); - } - - public RyaQueryBuilder(RyaQuery query) { - super(query); - this.ryaQuery = query; - } - - public RyaQuery build() { - return ryaQuery; - } - } - - public RyaStatement getQuery() { - return query; - } - - public void setQuery(RyaStatement query) { - this.query = query; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - - RyaQuery ryaQuery = (RyaQuery) o; - - if (query != null ? !query.equals(ryaQuery.query) : ryaQuery.query != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + (query != null ? query.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return "RyaQuery{" + - "query=" + query + - "options={" + super.toString() + - '}' + - '}'; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java deleted file mode 100644 index 7454eea8b..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java +++ /dev/null @@ -1,96 +0,0 @@ -package mvm.rya.api.persist.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaConfigured; -import mvm.rya.api.persist.RyaDAOException; - -import org.calrissian.mango.collect.CloseableIterable; -import org.openrdf.query.BindingSet; - -/** - * Rya Query Engine to perform queries against the Rya triple store. - *
- * Date: 7/17/12 - * Time: 8:25 AM - */ -public interface RyaQueryEngine extends RyaConfigured { - - /** - * Query the Rya store using the RyaStatement. The Configuration object provides information such as auths, ttl, etc - * - * @param stmt - * @param conf - * @return - * @throws RyaDAOException - * @deprecated - */ - public CloseableIteration query(RyaStatement stmt, C conf) throws RyaDAOException; - - /** - * Batch query - * - * @param stmts - * @param conf - * @return - * @throws RyaDAOException - */ - public CloseableIteration, RyaDAOException> - queryWithBindingSet(Collection> stmts, C conf) throws RyaDAOException; - - /** - * Performs intersection joins. - * - * @param stmts - * @param conf - * @return - * @throws RyaDAOException - * @deprecated - */ - public CloseableIteration batchQuery(Collection stmts, C conf) throws RyaDAOException; - - /** - * Query with a {@link} RyaQuery. A single query that will return a {@link CloseableIterable} of RyaStatements - * - * @param ryaQuery - * @return - * @throws RyaDAOException - */ - public CloseableIterable query(RyaQuery ryaQuery) throws RyaDAOException; - - /** - * Run a batch rya query - * - * @param batchRyaQuery - * @return - * @throws RyaDAOException - */ - public CloseableIterable query(BatchRyaQuery batchRyaQuery) throws RyaDAOException; - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java deleted file mode 100644 index c77796e60..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java +++ /dev/null @@ -1,246 +0,0 @@ -package mvm.rya.api.persist.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
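[Note on RyaQuery and RyaQueryEngine above: RyaQuery pairs one statement pattern with options, and RyaQueryEngine.query(RyaQuery) is the non-deprecated single-pattern entry point, returning a mango CloseableIterable rather than a CloseableIteration. A sketch, assuming the engine comes from an initialized RyaDAO and that the enclosing method may throw (CloseableIterable's close is a java.io.Closeable close):

    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.RyaDAO;
    import mvm.rya.api.persist.query.RyaQuery;
    import org.calrissian.mango.collect.CloseableIterable;

    class SingleQuerySketch {
        // dao is an assumed, initialized RyaDAO
        static void dump(RyaDAO dao) throws Exception {
            RyaQuery query = RyaQuery.builder(
                        new RyaStatement(new RyaURI("urn:example:alice"), null, null))
                    .setMaxResults(100L)
                    .setBatchSize(500)
                    .build();
            CloseableIterable<RyaStatement> results = dao.getQueryEngine().query(query);
            try {
                for (RyaStatement s : results) {
                    System.out.println(s);
                }
            } finally {
                results.close();
            }
        }
    }
]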
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; - -/** - */ -public class RyaQueryOptions { - private static final Logger logger = LoggerFactory.getLogger(RyaQueryOptions.class); - //options - protected String[] auths; - protected Long ttl; - protected Long currentTime; - protected Long maxResults; - protected Integer numQueryThreads = 4; - protected Integer batchSize = 1000; - protected String regexSubject; - protected String regexPredicate; - protected String regexObject; - protected RdfCloudTripleStoreConfiguration conf; - - public static class RyaOptionsBuilder { - private RyaQueryOptions options; - - public RyaOptionsBuilder(RyaQueryOptions query) { - this.options = query; - } - - public T load(RdfCloudTripleStoreConfiguration conf) { - options.setConf(conf); - return (T) this.setAuths(conf.getAuths()) - .setBatchSize(conf.getBatchSize()) - .setCurrentTime(conf.getStartTime()) - .setMaxResults(conf.getLimit()) - .setNumQueryThreads(conf.getNumThreads()) - .setRegexObject(conf.getRegexObject()) - .setRegexPredicate(conf.getRegexPredicate()) - .setRegexSubject(conf.getRegexSubject()) - .setTtl(conf.getTtl()); - } - - public T setAuths(String[] auths) { - options.setAuths(auths); - return (T) this; - } - - public T setRegexObject(String regexObject) { - options.setRegexObject(regexObject); - return (T) this; - } - - public T setRegexPredicate(String regexPredicate) { - options.setRegexPredicate(regexPredicate); - return (T) this; - } - - public T setRegexSubject(String regexSubject) { - options.setRegexSubject(regexSubject); - return (T) this; - } - - public T setBatchSize(Integer batchSize) { - options.setBatchSize(batchSize); - return (T) this; - } - - public T setNumQueryThreads(Integer numQueryThreads) { - options.setNumQueryThreads(numQueryThreads); - return (T) this; - } - - public T setMaxResults(Long maxResults) { - options.setMaxResults(maxResults); - return (T) this; - } - - public T setCurrentTime(Long currentTime) { - options.setCurrentTime(currentTime); - return (T) this; - } - - public T setTtl(Long ttl) { - options.setTtl(ttl); - return (T) this; - } - } - - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } - - public void setConf(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public Long getTtl() { - return ttl; - } - - public void setTtl(Long ttl) { - this.ttl = ttl; - } - - public Long getCurrentTime() { - return currentTime; - } - - public void setCurrentTime(Long currentTime) { - this.currentTime = currentTime; - } - - public Integer getNumQueryThreads() { - return numQueryThreads; - } - - public void setNumQueryThreads(Integer numQueryThreads) { - this.numQueryThreads = numQueryThreads; - } - - public Long getMaxResults() { - return maxResults; - } - - public void setMaxResults(Long maxResults) { - this.maxResults = maxResults; - } - - public Integer getBatchSize() { - return batchSize; - } - - public void setBatchSize(Integer batchSize) { - this.batchSize = batchSize; - } - - public String getRegexSubject() { - return regexSubject; - } - - public void setRegexSubject(String regexSubject) { - this.regexSubject = regexSubject; - } - - public String getRegexPredicate() { - return regexPredicate; - } - - public void setRegexPredicate(String regexPredicate) { - this.regexPredicate = regexPredicate; - } - - public String getRegexObject() { - return regexObject; - } - - public void setRegexObject(String regexObject) { - 
this.regexObject = regexObject; - } - - public String[] getAuths() { - return auths; - } - - public void setAuths(String[] auths) { - if (auths == null) { - this.auths = new String[0]; - } else { - this.auths = auths.clone(); - } - } - - @Override - public String toString() { - return "RyaQueryOptions{" + - "auths=" + (auths == null ? null : Arrays.asList(auths)) + - ", ttl=" + ttl + - ", currentTime=" + currentTime + - ", maxResults=" + maxResults + - ", numQueryThreads=" + numQueryThreads + - ", batchSize=" + batchSize + - ", regexSubject='" + regexSubject + '\'' + - ", regexPredicate='" + regexPredicate + '\'' + - ", regexObject='" + regexObject + '\'' + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - RyaQueryOptions that = (RyaQueryOptions) o; - - if (!Arrays.equals(auths, that.auths)) return false; - if (batchSize != null ? !batchSize.equals(that.batchSize) : that.batchSize != null) return false; - if (currentTime != null ? !currentTime.equals(that.currentTime) : that.currentTime != null) return false; - if (maxResults != null ? !maxResults.equals(that.maxResults) : that.maxResults != null) return false; - if (numQueryThreads != null ? !numQueryThreads.equals(that.numQueryThreads) : that.numQueryThreads != null) - return false; - if (regexObject != null ? !regexObject.equals(that.regexObject) : that.regexObject != null) return false; - if (regexPredicate != null ? !regexPredicate.equals(that.regexPredicate) : that.regexPredicate != null) - return false; - if (regexSubject != null ? !regexSubject.equals(that.regexSubject) : that.regexSubject != null) return false; - if (ttl != null ? !ttl.equals(that.ttl) : that.ttl != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = auths != null ? Arrays.hashCode(auths) : 0; - result = 31 * result + (ttl != null ? ttl.hashCode() : 0); - result = 31 * result + (currentTime != null ? currentTime.hashCode() : 0); - result = 31 * result + (maxResults != null ? maxResults.hashCode() : 0); - result = 31 * result + (numQueryThreads != null ? numQueryThreads.hashCode() : 0); - result = 31 * result + (batchSize != null ? batchSize.hashCode() : 0); - result = 31 * result + (regexSubject != null ? regexSubject.hashCode() : 0); - result = 31 * result + (regexPredicate != null ? regexPredicate.hashCode() : 0); - result = 31 * result + (regexObject != null ? regexObject.hashCode() : 0); - return result; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java deleted file mode 100644 index 286ea7a4c..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java +++ /dev/null @@ -1,158 +0,0 @@ -package mvm.rya.api.persist.query.join; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
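[Note on RyaQueryOptions above: every option (auths, ttl, current time, max results, thread count, batch size, and the three regex filters) can also be bulk-populated from the store configuration, since the builder's load(conf) copies each field in one call before individual setters override. Sketch; pattern and conf are assumed inputs:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.persist.query.RyaQuery;

    class LoadOptionsSketch {
        static RyaQuery fromConf(RyaStatement pattern, RdfCloudTripleStoreConfiguration conf) {
            return RyaQuery.builder(pattern)
                    .load(conf)               // copies every option from the configuration
                    .setNumQueryThreads(8)    // then override selectively
                    .build();
        }
    }
]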
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQueryEngine; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.utils.EnumerationWrapper; - -import java.util.Enumeration; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** - * Use HashTable to do a HashJoin. - *
- * TODO: Somehow make a more streaming way of doing this hash join. This will not support large sets. - * Date: 7/26/12 - * Time: 8:58 AM - */ -public class HashJoin implements Join { - - private RyaContext ryaContext = RyaContext.getInstance(); - private RyaQueryEngine ryaQueryEngine; - - public HashJoin() { - } - - public HashJoin(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } - - @Override - public CloseableIteration join(C conf, RyaURI... preds) throws RyaDAOException { - ConcurrentHashMap, Integer> ht = new ConcurrentHashMap, Integer>(); - int count = 0; - boolean first = true; - for (RyaURI pred : preds) { - count++; - //query - CloseableIteration results = ryaQueryEngine.query(new RyaStatement(null, pred, null), null); - //add to hashtable - while (results.hasNext()) { - RyaStatement next = results.next(); - RyaURI subject = next.getSubject(); - RyaType object = next.getObject(); - Map.Entry entry = new RdfCloudTripleStoreUtils.CustomEntry(subject, object); - if (!first) { - if (!ht.containsKey(entry)) { - continue; //not in join - } - } - ht.put(entry, count); - } - //remove from hashtable values that are under count - if (first) { - first = false; - } else { - for (Map.Entry, Integer> entry : ht.entrySet()) { - if (entry.getValue() < count) { - ht.remove(entry.getKey()); - } - } - } - } - final Enumeration> keys = ht.keys(); - return new CloseableIteration() { - @Override - public void close() throws RyaDAOException { - - } - - @Override - public boolean hasNext() throws RyaDAOException { - return keys.hasMoreElements(); - } - - @Override - public RyaStatement next() throws RyaDAOException { - Map.Entry subjObj = keys.nextElement(); - return new RyaStatement(subjObj.getKey(), null, subjObj.getValue()); - } - - @Override - public void remove() throws RyaDAOException { - keys.nextElement(); - } - }; - } - - @Override - public CloseableIteration join(C conf, Map.Entry... predObjs) throws RyaDAOException { - ConcurrentHashMap ht = new ConcurrentHashMap(); - int count = 0; - boolean first = true; - for (Map.Entry predObj : predObjs) { - count++; - RyaURI pred = predObj.getKey(); - RyaType obj = predObj.getValue(); - //query - CloseableIteration results = ryaQueryEngine.query(new RyaStatement(null, pred, obj), null); - //add to hashtable - while (results.hasNext()) { - RyaURI subject = results.next().getSubject(); - if (!first) { - if (!ht.containsKey(subject)) { - continue; //not in join - } - } - ht.put(subject, count); - } - //remove from hashtable values that are under count - if (first) { - first = false; - } else { - for (Map.Entry entry : ht.entrySet()) { - if (entry.getValue() < count) { - ht.remove(entry.getKey()); - } - } - } - } - return new EnumerationWrapper(ht.keys()); - } - - public RyaQueryEngine getRyaQueryEngine() { - return ryaQueryEngine; - } - - public void setRyaQueryEngine(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java deleted file mode 100644 index 3cb48a51a..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java +++ /dev/null @@ -1,233 +0,0 @@ -package mvm.rya.api.persist.query.join; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
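[Note on HashJoin above: it builds a ConcurrentHashMap keyed on (subject, object) entries, re-querying once per predicate and pruning entries whose counts lag, so only pairs related by every predicate survive; as its own TODO concedes, the whole intermediate set lives in memory. The statements it hands back carry a null predicate slot, since the join collapses the predicates away. A usage sketch; dao, conf, and the predicates are assumed placeholders:

    import info.aduna.iteration.CloseableIteration;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.RyaDAO;
    import mvm.rya.api.persist.RyaDAOException;
    import mvm.rya.api.persist.query.join.HashJoin;

    class HashJoinSketch {
        // dao and conf are assumed inputs; the predicates are made-up URIs
        static void run(RyaDAO dao, RdfCloudTripleStoreConfiguration conf) throws RyaDAOException {
            HashJoin join = new HashJoin(dao.getQueryEngine());
            CloseableIteration<RyaStatement, RyaDAOException> out = join.join(conf,
                    new RyaURI("urn:example:worksFor"),
                    new RyaURI("urn:example:managedBy"));
            try {
                while (out.hasNext()) {
                    RyaStatement s = out.next();   // subject/object pair; predicate slot is null
                }
            } finally {
                out.close();
            }
        }
    }
]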
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.base.Preconditions; -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.ConvertingIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.*; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQueryEngine; -import mvm.rya.api.resolver.RyaContext; -import org.openrdf.query.BindingSet; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; - -/** - * Date: 7/24/12 - * Time: 8:52 AM - */ -public class IterativeJoin implements Join { - - private RyaContext ryaContext = RyaContext.getInstance(); - private RyaQueryEngine ryaQueryEngine; - - public IterativeJoin() { - } - - public IterativeJoin(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } - - /** - * Return all statements that have input predicates. Predicates must not be null or ranges - * - * @param preds - * @return - */ - @Override - public CloseableIteration join(C conf, RyaURI... preds) - throws RyaDAOException { - Preconditions.checkNotNull(preds); - Preconditions.checkArgument(preds.length > 1, "Must join 2 or more"); - //TODO: Reorder predObjs based on statistics - - CloseableIteration iter = null; - for (RyaURI pred : preds) { - if (iter == null) { - iter = ryaQueryEngine.query(new RyaStatement(null, pred, null), null); - } else { - iter = join(iter, pred); - } - } - - return iter; - } - - /** - * Return all subjects that have the predicate objects associated. Predicate and objects must be not null or ranges - * to ensure sorting - * - * @param predObjs - * @return - * @throws mvm.rya.api.persist.RyaDAOException - * - */ - @Override - public CloseableIteration join(C conf, Map.Entry... 
predObjs) - throws RyaDAOException { - Preconditions.checkNotNull(predObjs); - Preconditions.checkArgument(predObjs.length > 1, "Must join 2 or more"); - - //TODO: Reorder predObjs based on statistics - CloseableIteration first = null; - CloseableIteration iter = null; - for (Map.Entry entry : predObjs) { - if (first == null) { - first = ryaQueryEngine.query(new RyaStatement(null, entry.getKey(), entry.getValue()), null); - } else if (iter == null) { - iter = join(new ConvertingIteration(first) { - - @Override - protected RyaURI convert(RyaStatement statement) throws RyaDAOException { - return statement.getSubject(); - } - }, entry); - } else { - iter = join(iter, entry); - } - } - - return iter; - } - - protected CloseableIteration join(final CloseableIteration iteration, - final Map.Entry predObj) { - //TODO: configure batch - //TODO: batch = 1, does not work - final int batch = 100; - return new CloseableIteration() { - - private CloseableIteration, RyaDAOException> query; - - @Override - public void close() throws RyaDAOException { - iteration.close(); - if (query != null) { - query.close(); - } - } - - @Override - public boolean hasNext() throws RyaDAOException { - return !(query == null || !query.hasNext()) || batchNext(); - } - - @Override - public RyaURI next() throws RyaDAOException { - if (query == null || !query.hasNext()) { - if (!batchNext()) return null; - } - if (query != null && query.hasNext()) { - return query.next().getKey().getSubject(); - } else { - return null; - } - } - - private boolean batchNext() throws RyaDAOException { - if (!iteration.hasNext()) { - return false; - } - Collection> batchedResults = new ArrayList>(); - for (int i = 0; i < batch && iteration.hasNext(); i++) { - batchedResults.add(new RdfCloudTripleStoreUtils.CustomEntry( - new RyaStatement(iteration.next(), predObj.getKey(), predObj.getValue()), null)); - } - query = ryaQueryEngine.queryWithBindingSet(batchedResults, null); - return query.hasNext(); - } - - @Override - public void remove() throws RyaDAOException { - this.next(); - } - }; - } - - protected CloseableIteration join( - final CloseableIteration iteration, final RyaURI pred) { - //TODO: configure batch - //TODO: batch = 1, does not work - final int batch = 100; - return new CloseableIteration() { - - private CloseableIteration, RyaDAOException> query; - - @Override - public void close() throws RyaDAOException { - iteration.close(); - if (query != null) { - query.close(); - } - } - - @Override - public boolean hasNext() throws RyaDAOException { - return !(query == null || !query.hasNext()) || batchNext(); - } - - @Override - public RyaStatement next() throws RyaDAOException { - if (query == null || !query.hasNext()) { - if (!batchNext()) return null; - } - if (query != null && query.hasNext()) { - return query.next().getKey(); - } else { - return null; - } - } - - private boolean batchNext() throws RyaDAOException { - if (!iteration.hasNext()) { - return false; - } - Collection> batchedResults = new ArrayList>(); - for (int i = 0; i < batch && iteration.hasNext(); i++) { - RyaStatement next = iteration.next(); - batchedResults.add(new RdfCloudTripleStoreUtils.CustomEntry( - new RyaStatement(next.getSubject(), pred, next.getObject()), null)); - } - query = ryaQueryEngine.queryWithBindingSet(batchedResults, null); - return query.hasNext(); - } - - @Override - public void remove() throws RyaDAOException { - this.next(); - } - }; - } - - public RyaQueryEngine getRyaQueryEngine() { - return ryaQueryEngine; - } - - public void 
setRyaQueryEngine(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java deleted file mode 100644 index 775af5371..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java +++ /dev/null @@ -1,44 +0,0 @@ -package mvm.rya.api.persist.query.join; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; - -import java.util.Map; - -/** - * Date: 7/24/12 - * Time: 4:28 PM - */ -public interface Join { - - CloseableIteration join(C conf, RyaURI... preds) - throws RyaDAOException; - - CloseableIteration join(C conf, Map.Entry... predObjs) - throws RyaDAOException; -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java deleted file mode 100644 index 1dfcbf11e..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java +++ /dev/null @@ -1,244 +0,0 @@ -package mvm.rya.api.persist.query.join; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
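[Note on IterativeJoin and the Join interface above: IterativeJoin streams instead of materializing, seeding from the first pattern's results and probing the store for the remaining patterns in hard-coded batches of 100 via queryWithBindingSet (the TODOs note the batch size should be configurable and that a batch of 1 does not work). Both it and HashJoin sit behind the two-method Join contract, so they are drop-in replacements for one another. A sketch of the predicate-object flavor, which yields the subjects carrying all of the given pairs; dao, conf, and the entries are assumed placeholders:

    import info.aduna.iteration.CloseableIteration;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.RdfCloudTripleStoreUtils;
    import mvm.rya.api.domain.RyaType;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.RyaDAO;
    import mvm.rya.api.persist.RyaDAOException;
    import mvm.rya.api.persist.query.join.IterativeJoin;
    import mvm.rya.api.persist.query.join.Join;

    class IterativeJoinSketch {
        // dao and conf are assumed inputs; the predicate/object pairs are placeholders
        static CloseableIteration<RyaURI, RyaDAOException> subjects(
                RyaDAO dao, RdfCloudTripleStoreConfiguration conf) throws RyaDAOException {
            Join join = new IterativeJoin(dao.getQueryEngine());
            return join.join(conf,
                    new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(
                            new RyaURI("urn:example:dept"), new RyaType("engineering")),
                    new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(
                            new RyaURI("urn:example:status"), new RyaType("active")));
        }
    }
]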
- */ - - - -import com.google.common.base.Preconditions; -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.EmptyIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.*; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQueryEngine; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.utils.PeekingCloseableIteration; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * Date: 7/24/12 - * Time: 8:52 AM - */ -public class MergeJoin implements Join { - - private RyaContext ryaContext = RyaContext.getInstance(); - private RyaQueryEngine ryaQueryEngine; - - public MergeJoin() { - } - - public MergeJoin(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } - - /** - * Return all statements that have input predicates. Predicates must not be null or ranges - * - * @param preds - * @return - */ - @Override - public CloseableIteration join(C conf, RyaURI... preds) - throws RyaDAOException { - Preconditions.checkNotNull(preds); - Preconditions.checkArgument(preds.length > 1, "Must join 2 or more"); - //TODO: Reorder predObjs based on statistics - final List> iters = new ArrayList>(); - for (RyaURI predicate : preds) { - Preconditions.checkArgument(predicate != null && !(predicate instanceof RyaRange)); - - CloseableIteration iter = ryaQueryEngine.query(new RyaStatement(null, predicate, null), conf); - iters.add(iter); - } - Preconditions.checkArgument(iters.size() > 1, "Must join 2 or more"); - - final CloseableIteration first = iters.remove(0); - - //perform merge operation - - return new CloseableIteration() { - - private RyaStatement first_stmt; - private RyaType first_obj; - - @Override - public void close() throws RyaDAOException { - for (CloseableIteration iter : iters) { - iter.close(); - } - } - - @Override - public boolean hasNext() throws RyaDAOException { - return first_stmt != null || check(); - } - - @Override - public RyaStatement next() throws RyaDAOException { - if (first_stmt != null) { - RyaStatement temp = first_stmt; - first_stmt = null; - return temp; - } - if (check()) { - RyaStatement temp = first_stmt; - first_stmt = null; - return temp; - } - return null; - } - - @Override - public void remove() throws RyaDAOException { - this.next(); - } - - protected boolean check() throws RyaDAOException { - if (!first.hasNext()) return false; - first_stmt = first.next(); - first_obj = first_stmt.getObject(); - for (CloseableIteration iter : iters) { - if (!iter.hasNext()) return false; //no more left to join - RyaType iter_obj = iter.next().getObject(); - while (first_obj.compareTo(iter_obj) < 0) { - if (!first.hasNext()) return false; - first_obj = first.next().getObject(); - } - while (first_obj.compareTo(iter_obj) > 0) { - if (!iter.hasNext()) return false; - iter_obj = iter.next().getObject(); - } - } - return true; - } - }; - } - - /** - * Return all subjects that have the predicate objects associated. Predicate and objects must be not null or ranges - * to ensure sorting - * - * @param predObjs - * @return - * @throws RyaDAOException - */ - @Override - public CloseableIteration join(C conf, Map.Entry... 
predObjs) - throws RyaDAOException { - Preconditions.checkNotNull(predObjs); - Preconditions.checkArgument(predObjs.length > 1, "Must join 2 or more"); - - //TODO: Reorder predObjs based on statistics - final List> iters = new ArrayList>(); - RyaURI earliest_subject = null; - for (Map.Entry predObj : predObjs) { - RyaURI predicate = predObj.getKey(); - RyaType object = predObj.getValue(); - Preconditions.checkArgument(predicate != null && !(predicate instanceof RyaRange)); - Preconditions.checkArgument(object != null && !(object instanceof RyaRange)); - - PeekingCloseableIteration iter = null; - if (earliest_subject == null) { - iter = new PeekingCloseableIteration( - ryaQueryEngine.query(new RyaStatement(null, predicate, object), conf)); - } else { - iter = new PeekingCloseableIteration( - ryaQueryEngine.query(new RyaStatement(new RyaURIRange(earliest_subject, RyaURIRange.LAST_URI), predicate, object), conf)); - } - if (!iter.hasNext()) { - return new EmptyIteration(); - } - //setting up range to make performant query - earliest_subject = iter.peek().getSubject(); - iters.add(iter); - } - Preconditions.checkArgument(iters.size() > 1, "Must join 2 or more"); - - final CloseableIteration first = iters.remove(0); - - //perform merge operation - - return new CloseableIteration() { - - private RyaURI first_subj; - - @Override - public void close() throws RyaDAOException { - for (CloseableIteration iter : iters) { - iter.close(); - } - } - - @Override - public boolean hasNext() throws RyaDAOException { - return first_subj != null || check(); - } - - @Override - public RyaURI next() throws RyaDAOException { - if (first_subj != null) { - RyaURI temp = first_subj; - first_subj = null; - return temp; - } - if (check()) { - RyaURI temp = first_subj; - first_subj = null; - return temp; - } - return null; - } - - @Override - public void remove() throws RyaDAOException { - this.next(); - } - - protected boolean check() throws RyaDAOException { - if (!first.hasNext()) return false; - first_subj = first.next().getSubject(); - for (CloseableIteration iter : iters) { - if (!iter.hasNext()) return false; //no more left to join - RyaURI iter_subj = iter.next().getSubject(); - while (first_subj.compareTo(iter_subj) < 0) { - if (!first.hasNext()) return false; - first_subj = first.next().getSubject(); - } - while (first_subj.compareTo(iter_subj) > 0) { - if (!iter.hasNext()) return false; - iter_subj = iter.next().getSubject(); - } - } - return true; - } - }; - } - - public RyaQueryEngine getRyaQueryEngine() { - return ryaQueryEngine; - } - - public void setRyaQueryEngine(RyaQueryEngine ryaQueryEngine) { - this.ryaQueryEngine = ryaQueryEngine; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java b/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java deleted file mode 100644 index 81f42b426..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java +++ /dev/null @@ -1,176 +0,0 @@ -package mvm.rya.api.persist.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
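The next deletion, RyaDAOHelper, is essentially an adapter layer: it wraps a RyaDAO query result so the checked RyaDAOException surfaces as the OpenRDF QueryEvaluationException callers expect. A stripped-down sketch of that delegate-and-translate pattern (illustrative names, and simplified to unchecked exceptions, unlike the checked exceptions the real iterations declare):

import java.util.Iterator;
import java.util.function.Function;

// Sketch: forward every call to the delegate and translate whatever it throws.
public class TranslatingIterator<T> implements Iterator<T> {
    private final Iterator<T> delegate;
    private final Function<RuntimeException, RuntimeException> translate;

    public TranslatingIterator(Iterator<T> delegate,
                               Function<RuntimeException, RuntimeException> translate) {
        this.delegate = delegate;
        this.translate = translate;
    }

    @Override public boolean hasNext() {
        try { return delegate.hasNext(); }
        catch (RuntimeException e) { throw translate.apply(e); }
    }

    @Override public T next() {
        try { return delegate.next(); }
        catch (RuntimeException e) { throw translate.apply(e); }
    }
}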
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.api.utils.NullableStatementImpl; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; -import java.util.NoSuchElementException; - -/** - * Date: 7/20/12 - * Time: 10:36 AM - */ -public class RyaDAOHelper { - - public static CloseableIteration query(RyaDAO ryaDAO, Resource subject, URI predicate, Value object, RdfCloudTripleStoreConfiguration conf, Resource... contexts) throws QueryEvaluationException { - return query(ryaDAO, new NullableStatementImpl(subject, predicate, object, contexts), conf); - } - - public static CloseableIteration query(RyaDAO ryaDAO, Statement stmt, RdfCloudTripleStoreConfiguration conf) throws QueryEvaluationException { - final CloseableIteration query; - try { - query = ryaDAO.getQueryEngine().query(RdfToRyaConversions.convertStatement(stmt), - conf); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - //TODO: only support one context for now - return new CloseableIteration() { //TODO: Create a new class struct for this - - private boolean isClosed = false; - @Override - public void close() throws QueryEvaluationException { - try { - isClosed = true; - query.close(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - try { - return query.hasNext(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public Statement next() throws QueryEvaluationException { - if (!hasNext() || isClosed) { - throw new NoSuchElementException(); - } - - try { - RyaStatement next = query.next(); - if (next == null) { - return null; - } - return RyaToRdfConversions.convertStatement(next); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public void remove() throws QueryEvaluationException { - try { - query.remove(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - }; - } - - public static CloseableIteration, QueryEvaluationException> query(RyaDAO ryaDAO, Collection> statements, RdfCloudTripleStoreConfiguration conf) throws QueryEvaluationException { - Collection> ryaStatements = new ArrayList>(statements.size()); - for (Map.Entry entry : statements) { - ryaStatements.add(new RdfCloudTripleStoreUtils.CustomEntry - (RdfToRyaConversions.convertStatement(entry.getKey()), entry.getValue())); - } - final CloseableIteration, RyaDAOException> query; - try { - query = 
ryaDAO.getQueryEngine().queryWithBindingSet(ryaStatements, conf); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - return new CloseableIteration, QueryEvaluationException>() { //TODO: Create a new class struct for this - private boolean isClosed = false; - - @Override - public void close() throws QueryEvaluationException { - isClosed = true; - try { - query.close(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - try { - return query.hasNext(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public Map.Entry next() throws QueryEvaluationException { - if (!hasNext() || isClosed) { - throw new NoSuchElementException(); - } - try { - - Map.Entry next = query.next(); - if (next == null) { - return null; - } - return new RdfCloudTripleStoreUtils.CustomEntry(RyaToRdfConversions.convertStatement(next.getKey()), next.getValue()); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public void remove() throws QueryEvaluationException { - try { - query.remove(); - } catch (RyaDAOException e) { - throw new QueryEvaluationException(e); - } - } - }; - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java deleted file mode 100644 index 5171feb27..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java +++ /dev/null @@ -1,93 +0,0 @@ -package mvm.rya.api.query.strategy; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
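AbstractTriplePatternStrategy, deleted next, builds a row-matching regex: after the positions are permuted for the table layout, each key position contributes either its bound value or the match-anything group ALL_REGEX, joined by the row delimiter. A compilable sketch of the composition (the NUL delimiter is an illustrative stand-in for Rya's DELIM constant):

// Sketch of the row-regex composition in buildRegex below.
public class RowRegexSketch {
    static final String ALL = "([\\s\\S]*)";   // same wildcard the deleted class uses
    static final String DELIM = "\u0000";      // stand-in for the row delimiter

    // Bound positions contribute their value, unbound ones the wildcard group.
    static String rowRegex(String first, String second, String third) {
        return (first != null ? first : ALL) + DELIM
                + (second != null ? second : ALL) + DELIM
                + (third != null ? third : ALL);
    }

    public static void main(String[] args) {
        // PO layout with predicate and object bound, subject free:
        System.out.println(rowRegex("urn:pred", "urn:obj", null));
    }
}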
- */ - - - -import com.google.common.base.Preconditions; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.triple.TripleRowRegex; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM; - -/** - * Date: 7/14/12 - * Time: 8:06 AM - */ -public abstract class AbstractTriplePatternStrategy implements TriplePatternStrategy { - public static final String ALL_REGEX = "([\\s\\S]*)"; - - public abstract RdfCloudTripleStoreConstants.TABLE_LAYOUT getLayout(); - - @Override - public TripleRowRegex buildRegex(String subject, String predicate, String object, String context, byte[] objectTypeInfo) { - RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = getLayout(); - Preconditions.checkNotNull(table_layout); - if (subject == null && predicate == null && object == null && context == null && objectTypeInfo == null) { - return null; //no regex - } - StringBuilder sb = new StringBuilder(); - String first = subject; - String second = predicate; - String third = object; - if (table_layout == RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO) { - first = predicate; - second = object; - third = subject; - } else if (table_layout == RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP) { - first = object; - second = subject; - third = predicate; - } - - if (first != null) { - sb.append(first); - } else { - sb.append(ALL_REGEX); - } - sb.append(DELIM); - - if (second != null) { - sb.append(second); - } else { - sb.append(ALL_REGEX); - } - sb.append(DELIM); - - if (third != null) { - sb.append(third); - if (objectTypeInfo == null) { - sb.append(TYPE_DELIM); - sb.append(ALL_REGEX); - }else { - sb.append(new String(objectTypeInfo)); - } - }else { - sb.append(ALL_REGEX); - if (objectTypeInfo != null) { - sb.append(new String(objectTypeInfo)); - } - } - - return new TripleRowRegex(sb.toString(), (context != null) ? (context + ALL_REGEX) : null, null); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java deleted file mode 100644 index 6ebc72232..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java +++ /dev/null @@ -1,45 +0,0 @@ -package mvm.rya.api.query.strategy; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -/** - * Date: 1/10/13 - * Time: 12:47 PM - */ -public class ByteRange { - - private byte[] start; - private byte[] end; - - public ByteRange(byte[] start, byte[] end) { - this.start = start; - this.end = end; - } - - public byte[] getStart() { - return start; - } - - public byte[] getEnd() { - return end; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java deleted file mode 100644 index 7b7eb3922..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java +++ /dev/null @@ -1,49 +0,0 @@ -package mvm.rya.api.query.strategy; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.triple.TripleRowRegex; - -import java.io.IOException; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; - -/** - * Date: 7/14/12 - * Time: 7:21 AM - */ -public interface TriplePatternStrategy { - - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, - RdfCloudTripleStoreConfiguration conf) throws IOException; - - public TABLE_LAYOUT getLayout(); - - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context); - - public TripleRowRegex buildRegex(String subject, String predicate, String object, String context, byte[] objectTypeInfo); - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java deleted file mode 100644 index 04d81cea6..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java +++ /dev/null @@ -1,135 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
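The hashed strategies below differ from the plain ones in one step: the row key is prefixed with an MD5 digest of its leading component, which spreads lexically clustered values across the key space while keeping the plaintext in the key for exact reconstruction. A self-contained sketch of that key shape (the 0x00 delimiter is an illustrative stand-in):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Sketch: key = md5(value) + DELIM + value, mirroring the hashed-prefix layout.
public class HashedKeySketch {
    static byte[] hashedKey(String value) throws NoSuchAlgorithmException {
        byte[] plain = value.getBytes(StandardCharsets.UTF_8);
        byte[] hash = MessageDigest.getInstance("MD5").digest(plain);
        byte[] key = new byte[hash.length + 1 + plain.length];
        System.arraycopy(hash, 0, key, 0, hash.length);
        key[hash.length] = 0x00;                       // DELIM placeholder
        System.arraycopy(plain, 0, key, hash.length + 1, plain.length);
        return key;
    }
}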
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES; -import static mvm.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES; - -import java.io.IOException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategy; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import com.google.common.primitives.Bytes; - -/** - * Date: 7/14/12 - * Time: 7:35 AM - */ -public class HashedPoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrategy { - - @Override - public RdfCloudTripleStoreConstants.TABLE_LAYOUT getLayout() { - return RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - } - - @Override - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, - RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException { - try { - //po(ng) - //po_r(s)(ng) - //p(ng) - //p_r(o)(ng) - //r(p)(ng) - if (!handles(subject, predicate, object, context)) return null; - - RyaContext ryaContext = RyaContext.getInstance(); - MessageDigest md = MessageDigest.getInstance("MD5"); - - - RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - byte[] start, stop; - if (object != null) { - if (object instanceof RyaRange) { - //p_r(o) - RyaRange rv = (RyaRange) object; - rv = ryaContext.transformRange(rv); - byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0]; - byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0]; - byte[] predBytes = predicate.getData().getBytes(); - byte[] predHash = md.digest(predBytes); - start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes); - stop = Bytes.concat(predHash, DELIM_BYTES, predBytes,DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES); - } else { - if (subject != null && subject instanceof RyaRange) { - //po_r(s) - RyaRange ru = (RyaRange) subject; - ru = ryaContext.transformRange(ru); - byte[] subjStartBytes = ru.getStart().getData().getBytes(); - byte[] subjStopBytes = ru.getStop().getData().getBytes(); - byte[] predBytes = predicate.getData().getBytes(); - byte[] predHash = md.digest(predBytes); - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStartBytes); - stop = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStopBytes, TYPE_DELIM_BYTES, LAST_BYTES); - } else { - //po - //TODO: There must be a better way than creating multiple byte[] - byte[] predBytes = predicate.getData().getBytes(); - byte[] predHash = md.digest(predBytes); - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } - } else { - //p - byte[] predBytes = predicate.getData().getBytes(); - byte[] predHash = md.digest(predBytes); - start 
= Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - return new RdfCloudTripleStoreUtils.CustomEntry(table_layout, new ByteRange(start, stop)); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } catch (NoSuchAlgorithmException e) { - throw new IOException(e); - } - } - - @Override - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - //po(ng) - //p_r(o)(ng) - //po_r(s)(ng) - //p(ng) - //r(p)(ng) - if ((predicate == null) || (predicate instanceof RyaRange)) return false; - if (subject != null && !(subject instanceof RyaRange)) return false; - return subject == null || object != null; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java deleted file mode 100644 index b7204a9f4..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java +++ /dev/null @@ -1,138 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
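A recurring move in every defineRange implementation here: to scan all rows sharing a prefix, start is the prefix plus a delimiter and stop is the same prefix extended with a maximal sentinel byte (the role Rya's LAST_BYTES constant plays). A minimal sketch under that assumption:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

// Sketch: build [start, stop] keys covering every row under a given prefix.
public class PrefixRangeSketch {
    static byte[][] prefixRange(String prefix) {
        byte[] start = (prefix + "\u0000").getBytes(StandardCharsets.UTF_8);
        byte[] stop = Arrays.copyOf(start, start.length + 1);
        stop[stop.length - 1] = (byte) 0xFF;   // sentinel: sorts after any real row
        return new byte[][]{start, stop};
    }
}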
- */ - - - -import com.google.common.primitives.Bytes; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.domain.RyaURIRange; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategy; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import java.io.IOException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.*; - -/** - * Date: 7/14/12 - * Time: 7:35 AM - */ -public class HashedSpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrategy { - - - @Override - public TABLE_LAYOUT getLayout() { - return TABLE_LAYOUT.SPO; - } - - @Override - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, - RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException { - try { - //spo(ng) - //sp(ng) - //s(ng) - //sp_r(o)(ng) - //s_r(p)(ng) - if (!handles(subject, predicate, object, context)) return null; - MessageDigest md = MessageDigest.getInstance("MD5"); - - RyaContext ryaContext = RyaContext.getInstance(); - - TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO; - byte[] start; - byte[] stop; - if (predicate != null) { - if (object != null) { - if (object instanceof RyaRange) { - //sp_r(o) - //range = sp_r(o.s)->sp_r(o.e) (remove last byte to remove type info) - RyaRange rv = (RyaRange) object; - rv = ryaContext.transformRange(rv); - byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0]; - byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0]; - byte[] subjBytes = subject.getData().getBytes(); - byte[] hashSubj = md.digest(subjBytes); - byte[] predBytes = predicate.getData().getBytes(); - start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes); - stop = Bytes.concat(hashSubj, DELIM_BYTES,subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES); - } else { - //spo - //range = spo->spo (remove last byte to remove type info) - //TODO: There must be a better way than creating multiple byte[] - byte[] subjBytes = subject.getData().getBytes(); - byte[] hashSubj = md.digest(subjBytes); - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } else if (predicate instanceof RyaRange) { - //s_r(p) - //range = s_r(p.s)->s_r(p.e) - RyaRange rv = (RyaRange) predicate; - rv = ryaContext.transformRange(rv); - byte[] subjBytes = subject.getData().getBytes(); - byte[] hashSubj = md.digest(subjBytes); - byte[] predStartBytes = rv.getStart().getData().getBytes(); - byte[] predStopBytes = rv.getStop().getData().getBytes(); - start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predStartBytes); - stop = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predStopBytes, DELIM_BYTES, LAST_BYTES); - } else { - //sp - //range = sp - byte[] subjBytes = subject.getData().getBytes(); - byte[] hashSubj = md.digest(subjBytes); - start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } 
- } else { - //s - //range = s - byte[] subjBytes = subject.getData().getBytes(); - byte[] hashSubj = md.digest(subjBytes); - start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - return new RdfCloudTripleStoreUtils.CustomEntry(table_layout, - new ByteRange(start, stop)); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } catch (NoSuchAlgorithmException e) { - throw new IOException(e); - } - } - - @Override - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - //if subject is not null and not a range (if predicate is null then object must be null) - return (subject != null && !(subject instanceof RyaURIRange)) && !((predicate == null || predicate instanceof RyaURIRange) && (object != null)); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java deleted file mode 100644 index 24f5852e8..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java +++ /dev/null @@ -1,113 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
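The OSP strategy below completes the picture of why Rya keeps three permuted tables: each layout moves a different triple position to the front of the key, so whichever position a query binds can drive a leading-prefix range scan. A toy illustration (identifiers are illustrative, not the deleted API):

// Sketch: each layout is just a permutation of the triple into key order.
enum Layout { SPO, PO, OSP }

class LayoutSketch {
    static String[] permute(Layout layout, String s, String p, String o) {
        switch (layout) {
            case SPO: return new String[]{s, p, o};   // subject leads the key
            case PO:  return new String[]{p, o, s};   // predicate leads the key
            case OSP: return new String[]{o, s, p};   // object leads the key
            default:  throw new AssertionError(layout);
        }
    }
}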
- */ - - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES; -import static mvm.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES; - -import java.io.IOException; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategy; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import com.google.common.primitives.Bytes; - -/** - * Date: 7/14/12 - * Time: 7:35 AM - */ -public class OspWholeRowTriplePatternStrategy extends AbstractTriplePatternStrategy { - - @Override - public TABLE_LAYOUT getLayout() { - return TABLE_LAYOUT.OSP; - } - - @Override - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, - RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException { - try { - //os(ng) - //o_r(s)(ng) - //o(ng) - //r(o) - if (!handles(subject, predicate, object, context)) return null; - - RyaContext ryaContext = RyaContext.getInstance(); - - TABLE_LAYOUT table_layout = TABLE_LAYOUT.OSP; - byte[] start, stop; - if (subject != null) { - if (subject instanceof RyaRange) { - //o_r(s) - RyaRange ru = (RyaRange) subject; - ru = ryaContext.transformRange(ru); - byte[] subjStartBytes = ru.getStart().getData().getBytes(); - byte[] subjEndBytes = ru.getStop().getData().getBytes(); - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(objBytes, DELIM_BYTES, subjStartBytes); - stop = Bytes.concat(objBytes, DELIM_BYTES, subjEndBytes, DELIM_BYTES, LAST_BYTES); - } else { - //os - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(objBytes, DELIM_BYTES, subject.getData().getBytes(), DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } else { - if (object instanceof RyaRange) { - //r(o) - RyaRange rv = (RyaRange) object; - rv = ryaContext.transformRange(rv); - start = ryaContext.serializeType(rv.getStart())[0]; - stop = Bytes.concat(ryaContext.serializeType(rv.getStop())[0], DELIM_BYTES, LAST_BYTES); - } else { - //o - start = Bytes.concat(ryaContext.serializeType(object)[0], DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } - return new RdfCloudTripleStoreUtils.CustomEntry(table_layout, new ByteRange(start, stop)); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } - } - - @Override - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - //os(ng) - //o_r(s)(ng) - //o(ng) - //r(o) - return object != null && (!(object instanceof RyaRange) || predicate == null && subject == null); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java deleted file mode 100644 index 3f050e042..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java +++ /dev/null @@ -1,128 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.primitives.Bytes; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategy; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import java.io.IOException; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.*; - -/** - * Date: 7/14/12 - * Time: 7:35 AM - */ -public class PoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrategy { - - @Override - public RdfCloudTripleStoreConstants.TABLE_LAYOUT getLayout() { - return RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - } - - @Override - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, - RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException { - try { - //po(ng) - //po_r(s)(ng) - //p(ng) - //p_r(o)(ng) - //r(p)(ng) - if (!handles(subject, predicate, object, context)) return null; - - RyaContext ryaContext = RyaContext.getInstance(); - - RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - byte[] start, stop; - if (object != null) { - if (object instanceof RyaRange) { - //p_r(o) - RyaRange rv = (RyaRange) object; - rv = ryaContext.transformRange(rv); - byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0]; - byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0]; - byte[] predBytes = predicate.getData().getBytes(); - start = Bytes.concat(predBytes, DELIM_BYTES, objStartBytes); - stop = Bytes.concat(predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES); - } else { - if (subject != null && subject instanceof RyaRange) { - //po_r(s) - RyaRange ru = (RyaRange) subject; - ru = ryaContext.transformRange(ru); - byte[] subjStartBytes = ru.getStart().getData().getBytes(); - byte[] subjStopBytes = ru.getStop().getData().getBytes(); - byte[] predBytes = predicate.getData().getBytes(); - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStartBytes); - stop = Bytes.concat(predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStopBytes, TYPE_DELIM_BYTES, LAST_BYTES); - } else { - //po - //TODO: There must be a better way than creating multiple byte[] - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(predicate.getData().getBytes(), DELIM_BYTES, objBytes, DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } - } else if (predicate instanceof RyaRange) { - //r(p) - RyaRange rv = (RyaRange) predicate; - 
rv = ryaContext.transformRange(rv); - start = rv.getStart().getData().getBytes(); - stop = Bytes.concat(rv.getStop().getData().getBytes(), DELIM_BYTES, LAST_BYTES); - } else { - //p - start = Bytes.concat(predicate.getData().getBytes(), DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - return new RdfCloudTripleStoreUtils.CustomEntry(table_layout, new ByteRange(start, stop)); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } - } - - @Override - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - //po(ng) - //p_r(o)(ng) - //po_r(s)(ng) - //p(ng) - //r(p)(ng) - if (predicate == null) return false; - if (subject != null && !(subject instanceof RyaRange)) return false; - if (predicate instanceof RyaRange) - return object == null && subject == null; - return subject == null || object != null; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java deleted file mode 100644 index 2b91a4b12..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java +++ /dev/null @@ -1,130 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
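Taken together, the handles methods in these strategy classes form a dispatch table from "which positions are bound" to "which layout serves the query". Very roughly, and as a deliberate simplification that ignores the range and context cases the real methods check:

// Sketch only: booleans model "is this position bound"; the real handles()
// methods additionally reject or accept RyaRange arguments per position.
class HandlesSketch {
    static String pick(boolean s, boolean p, boolean o) {
        if (s) return "SPO";          // subject bound: subject-led table
        if (p) return "PO";           // predicate bound: predicate-led table
        if (o) return "OSP";          // object bound: object-led table
        return "SPO";                 // nothing bound: full scan of any table
    }
}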
- */ - - - -import com.google.common.primitives.Bytes; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.domain.RyaURIRange; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategy; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import java.io.IOException; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.*; - -/** - * Date: 7/14/12 - * Time: 7:35 AM - */ -public class SpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrategy { - - @Override - public TABLE_LAYOUT getLayout() { - return TABLE_LAYOUT.SPO; - } - - @Override - public Map.Entry defineRange(RyaURI subject, RyaURI predicate, RyaType object, - RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException { - try { - //spo(ng) - //sp(ng) - //s(ng) - //sp_r(o)(ng) - //s_r(p)(ng) - if (!handles(subject, predicate, object, context)) return null; - - RyaContext ryaContext = RyaContext.getInstance(); - - TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO; - byte[] start; - byte[] stop; - if (predicate != null) { - if (object != null) { - if (object instanceof RyaRange) { - //sp_r(o) - //range = sp_r(o.s)->sp_r(o.e) (remove last byte to remove type info) - RyaRange rv = (RyaRange) object; - rv = ryaContext.transformRange(rv); - byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0]; - byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0]; - byte[] subjBytes = subject.getData().getBytes(); - byte[] predBytes = predicate.getData().getBytes(); - start = Bytes.concat(subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes); - stop = Bytes.concat(subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES); - } else { - //spo - //range = spo->spo (remove last byte to remove type info) - //TODO: There must be a better way than creating multiple byte[] - byte[] objBytes = ryaContext.serializeType(object)[0]; - start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } else if (predicate instanceof RyaRange) { - //s_r(p) - //range = s_r(p.s)->s_r(p.e) - RyaRange rv = (RyaRange) predicate; - rv = ryaContext.transformRange(rv); - byte[] subjBytes = subject.getData().getBytes(); - byte[] predStartBytes = rv.getStart().getData().getBytes(); - byte[] predStopBytes = rv.getStop().getData().getBytes(); - start = Bytes.concat(subjBytes, DELIM_BYTES, predStartBytes); - stop = Bytes.concat(subjBytes, DELIM_BYTES, predStopBytes, DELIM_BYTES, LAST_BYTES); - } else { - //sp - //range = sp - start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - } else if (subject instanceof RyaRange) { - //r(s) - //range = r(s.s) -> r(s.e) - RyaRange ru = (RyaRange) subject; - ru = ryaContext.transformRange(ru); - start = ru.getStart().getData().getBytes(); - stop = Bytes.concat(ru.getStop().getData().getBytes(), DELIM_BYTES, LAST_BYTES); - } else { - //s - //range = s - start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES); - stop = Bytes.concat(start, LAST_BYTES); - } - return new RdfCloudTripleStoreUtils.CustomEntry(table_layout, - new ByteRange(start, 
stop)); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } - } - - @Override - public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) { - //if subject is not null and (if predicate is null then object must be null) - return (subject != null && !(subject instanceof RyaURIRange && predicate != null)) && !((predicate == null || predicate instanceof RyaURIRange) && (object != null)); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java deleted file mode 100644 index 3a6d12556..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java +++ /dev/null @@ -1,57 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.URI; - -/** - * Date: 7/16/12 - * Time: 12:25 PM - */ -public class CustomRyaTypeResolverMapping extends RyaTypeResolverMapping { - - protected URI ryaDataType; - protected byte markerByte; - - public CustomRyaTypeResolverMapping() { - } - - public CustomRyaTypeResolverMapping(URI ryaDataType, byte markerByte) { - this(null, ryaDataType, markerByte); - } - - public CustomRyaTypeResolverMapping(RyaTypeResolver ryaTypeResolver, URI ryaDataType, byte markerByte) { - super(ryaTypeResolver); - this.ryaDataType = ryaDataType; - this.markerByte = markerByte; - } - - @Override - public URI getRyaDataType() { - return ryaDataType; - } - - @Override - byte getMarkerByte() { - return markerByte; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java deleted file mode 100644 index 485bf0d51..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java +++ /dev/null @@ -1,93 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
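RdfToRyaConversions, deleted next, is the OpenRDF-to-Rya half of the conversion pair. A hypothetical usage sketch compiled against the deleted API (the URNs are made up for illustration):

import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.resolver.RdfToRyaConversions;
import org.openrdf.model.Statement;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.URIImpl;

// Sketch: an OpenRDF Statement becomes a RyaStatement whose components carry
// the plain string forms of the original values.
public class ConversionUsageSketch {
    public static void main(String[] args) {
        Statement stmt = new StatementImpl(
                new URIImpl("urn:alice"),
                new URIImpl("urn:knows"),
                new URIImpl("urn:bob"));
        RyaStatement rya = RdfToRyaConversions.convertStatement(stmt);
        System.out.println(rya.getSubject().getData()); // urn:alice
    }
}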
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.domain.*; -import org.openrdf.model.*; - -/** - * Date: 7/17/12 - * Time: 8:34 AM - */ -public class RdfToRyaConversions { - - public static RyaURI convertURI(URI uri) { - if (uri == null) return null; - if (uri instanceof RangeURI) { - RangeURI ruri = (RangeURI) uri; - return new RyaURIRange(convertURI(ruri.getStart()), convertURI(ruri.getEnd())); - } - return new RyaURI(uri.stringValue()); - } - - public static RyaType convertLiteral(Literal literal) { - if (literal == null) return null; - if (literal.getDatatype() != null) { - return new RyaType(literal.getDatatype(), literal.stringValue()); - } - //no language literal conversion yet - return new RyaType(literal.stringValue()); - } - - public static RyaType convertValue(Value value) { - if (value == null) return null; - //assuming either uri or Literal here - if(value instanceof Resource) { - return convertResource((Resource) value); - } - if (value instanceof Literal) { - return convertLiteral((Literal) value); - } - if (value instanceof RangeValue) { - RangeValue rv = (RangeValue) value; - if (rv.getStart() instanceof URI) { - return new RyaURIRange(convertURI((URI) rv.getStart()), convertURI((URI) rv.getEnd())); - } else { - //literal - return new RyaTypeRange(convertLiteral((Literal) rv.getStart()), convertLiteral((Literal) rv.getEnd())); - } - } - return null; - } - - public static RyaURI convertResource(Resource subject) { - if(subject == null) return null; - if (subject instanceof BNode) { - return new RyaURI(RyaSchema.BNODE_NAMESPACE + ((BNode) subject).getID()); - } - return convertURI((URI) subject); - } - - public static RyaStatement convertStatement(Statement statement) { - if (statement == null) return null; - Resource subject = statement.getSubject(); - URI predicate = statement.getPredicate(); - Value object = statement.getObject(); - Resource context = statement.getContext(); - return new RyaStatement( - convertResource(subject), - convertURI(predicate), - convertValue(object), - convertResource(context)); - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java deleted file mode 100644 index 2b97e1cc5..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java +++ /dev/null @@ -1,192 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
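RyaContext, deleted next, keeps two synchronized indexes over the type resolvers: datatype URI to resolver for serialization, and marker byte to resolver for deserialization, dispatching on the last byte of a stored value. A minimal sketch of that registry shape (Codec is an illustrative stand-in for RyaTypeResolver):

import java.util.HashMap;
import java.util.Map;

// Sketch: register each codec under both its datatype and its marker byte,
// then dispatch deserialization on the final byte of the stored value.
class MarkerRegistrySketch {
    interface Codec { byte marker(); }

    private final Map<String, Codec> byDatatype = new HashMap<>();
    private final Map<Byte, Codec> byMarker = new HashMap<>();

    void register(String datatype, Codec codec) {
        byDatatype.put(datatype, codec);
        byMarker.put(codec.marker(), codec);   // keep both indexes in sync
    }

    Codec forValue(byte[] serialized) {
        // the marker byte is written last, so dispatch on the final byte
        return byMarker.get(serialized[serialized.length - 1]);
    }
}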
- */ - - - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.resolver.impl.BooleanRyaTypeResolver; -import mvm.rya.api.resolver.impl.ByteRyaTypeResolver; -import mvm.rya.api.resolver.impl.CustomDatatypeResolver; -import mvm.rya.api.resolver.impl.DateTimeRyaTypeResolver; -import mvm.rya.api.resolver.impl.DoubleRyaTypeResolver; -import mvm.rya.api.resolver.impl.FloatRyaTypeResolver; -import mvm.rya.api.resolver.impl.IntegerRyaTypeResolver; -import mvm.rya.api.resolver.impl.LongRyaTypeResolver; -import mvm.rya.api.resolver.impl.RyaTypeResolverImpl; -import mvm.rya.api.resolver.impl.RyaURIResolver; -import mvm.rya.api.resolver.impl.ServiceBackedRyaTypeResolverMappings; -import mvm.rya.api.resolver.impl.ShortRyaTypeResolver; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Date: 7/16/12 - * Time: 12:04 PM - */ -public class RyaContext { - - public Log logger = LogFactory.getLog(RyaContext.class); - - private Map uriToResolver = new HashMap(); - private Map byteToResolver = new HashMap(); - private RyaTypeResolver defaultResolver = new CustomDatatypeResolver(); - - private RyaContext() { - //add default - addDefaultMappings(); - } - - protected void addDefaultMappings() { - if (logger.isDebugEnabled()) { - logger.debug("Adding default mappings"); - } - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new RyaTypeResolverImpl())); // plain string - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new RyaURIResolver())); // uri - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new DateTimeRyaTypeResolver())); // dateTime - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new DoubleRyaTypeResolver())); // double - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new FloatRyaTypeResolver())); // float - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new IntegerRyaTypeResolver())); // integer - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new ShortRyaTypeResolver())); // short - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new LongRyaTypeResolver())); // long - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new BooleanRyaTypeResolver())); // boolean - addRyaTypeResolverMapping(new RyaTypeResolverMapping(new ByteRyaTypeResolver())); // byte - - //int is integer - uriToResolver.put(XMLSchema.INT, new IntegerRyaTypeResolver()); - - //add service loaded mappings - addRyaTypeResolverMappings(new ServiceBackedRyaTypeResolverMappings().getResolvers()); - } - - private static class RyaContextHolder { - public static final RyaContext INSTANCE = new RyaContext(); - } - - public synchronized static RyaContext getInstance() { - return RyaContextHolder.INSTANCE; - } - - - //need to go from datatype->resolver - public RyaTypeResolver retrieveResolver(URI datatype) { - RyaTypeResolver ryaTypeResolver = uriToResolver.get(datatype); - if (ryaTypeResolver == null) return defaultResolver; - return ryaTypeResolver; - } - - //need to go from byte->resolver - public RyaTypeResolver retrieveResolver(byte markerByte) { - RyaTypeResolver ryaTypeResolver = byteToResolver.get(markerByte); - if (ryaTypeResolver == null) return defaultResolver; - return ryaTypeResolver; - } - - public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException { - RyaTypeResolver ryaTypeResolver = retrieveResolver(ryaType.getDataType()); - if 
(ryaTypeResolver != null) { - return ryaTypeResolver.serialize(ryaType); - } - return null; - } - - public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException { - RyaTypeResolver ryaTypeResolver = retrieveResolver(ryaType.getDataType()); - if (ryaTypeResolver != null) { - return ryaTypeResolver.serializeType(ryaType); - } - return null; - } - - public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException { - RyaTypeResolver ryaTypeResolver = retrieveResolver(bytes[bytes.length - 1]); - if (ryaTypeResolver != null) { - return ryaTypeResolver.deserialize(bytes); - } - return null; - } - - public void addRyaTypeResolverMapping(RyaTypeResolverMapping mapping) { - if (!uriToResolver.containsKey(mapping.getRyaDataType())) { - if (logger.isDebugEnabled()) { - logger.debug("addRyaTypeResolverMapping uri:[" + mapping.getRyaDataType() + "] byte:[" + mapping.getMarkerByte() + "] for mapping[" + mapping + "]"); - } - uriToResolver.put(mapping.getRyaDataType(), mapping.getRyaTypeResolver()); - byteToResolver.put(mapping.getMarkerByte(), mapping.getRyaTypeResolver()); - } else { - logger.warn("Could not add ryaType mapping because one already exists. uri:[" + mapping.getRyaDataType() + "] byte:[" + mapping.getMarkerByte() + "] for mapping[" + mapping + "]"); - } - } - - public void addRyaTypeResolverMappings(List mappings) { - for (RyaTypeResolverMapping mapping : mappings) { - addRyaTypeResolverMapping(mapping); - } - } - - public RyaTypeResolver removeRyaTypeResolver(URI dataType) { - RyaTypeResolver ryaTypeResolver = uriToResolver.remove(dataType); - if (ryaTypeResolver != null) { - if (logger.isDebugEnabled()) { - logger.debug("Removing ryaType Resolver uri[" + dataType + "] + [" + ryaTypeResolver + "]"); - } - byteToResolver.remove(ryaTypeResolver.getMarkerByte()); - return ryaTypeResolver; - } - return null; - } - - public RyaTypeResolver removeRyaTypeResolver(byte markerByte) { - RyaTypeResolver ryaTypeResolver = byteToResolver.remove(markerByte); - if (ryaTypeResolver != null) { - if (logger.isDebugEnabled()) { - logger.debug("Removing ryaType Resolver byte[" + markerByte + "] + [" + ryaTypeResolver + "]"); - } - uriToResolver.remove(ryaTypeResolver.getRyaDataType()); - return ryaTypeResolver; - } - return null; - } - - //transform range - public RyaRange transformRange(RyaRange range) throws RyaTypeResolverException { - RyaTypeResolver ryaTypeResolver = retrieveResolver(range.getStart().getDataType()); - if (ryaTypeResolver != null) { - return ryaTypeResolver.transformRange(range); - } - return range; - } - - public RyaTypeResolver getDefaultResolver() { - return defaultResolver; - } - - public void setDefaultResolver(RyaTypeResolver defaultResolver) { - this.defaultResolver = defaultResolver; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java deleted file mode 100644 index a30d250cb..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java +++ /dev/null @@ -1,75 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import org.openrdf.model.Literal; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Date: 7/17/12 - * Time: 8:34 AM - */ -public class RyaToRdfConversions { - - public static URI convertURI(RyaURI uri) { - return new URIImpl(uri.getData()); - } - - public static Literal convertLiteral(RyaType literal) { - if (XMLSchema.STRING.equals(literal.getDataType())) { - return new LiteralImpl(literal.getData()); - } else { - return new LiteralImpl(literal.getData(), literal.getDataType()); - } - //TODO: No Language support yet - } - - public static Value convertValue(RyaType value) { - //assuming either uri or Literal here - return (value instanceof RyaURI) ? convertURI((RyaURI) value) : convertLiteral(value); - } - - public static Statement convertStatement(RyaStatement statement) { - assert statement != null; - if (statement.getContext() != null) { - return new ContextStatementImpl(convertURI(statement.getSubject()), - convertURI(statement.getPredicate()), - convertValue(statement.getObject()), - convertURI(statement.getContext())); - } else { - return new StatementImpl(convertURI(statement.getSubject()), - convertURI(statement.getPredicate()), - convertValue(statement.getObject())); - } - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java deleted file mode 100644 index b3c244e30..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java +++ /dev/null @@ -1,123 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
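RyaTripleContext, deleted next, selects a TriplePatternStrategy by asking each registered strategy in order whether it handles the bound positions. A hypothetical call against the deleted API (the URN is made up; null marks an unbound position):

import mvm.rya.api.RdfCloudTripleStoreConfiguration;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.api.query.strategy.TriplePatternStrategy;
import mvm.rya.api.resolver.RyaTripleContext;

// Sketch: the first registered strategy whose handles() accepts the pattern
// decides which table layout serves the query.
public class StrategySelectionSketch {
    static TriplePatternStrategy pick(RdfCloudTripleStoreConfiguration conf) {
        RyaTripleContext ctx = RyaTripleContext.getInstance(conf);
        // Only the subject is bound, so an SPO-led strategy should claim it.
        return ctx.retrieveStrategy(new RyaURI("urn:alice"), null, null, null);
    }
}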
- */
-
-
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.query.strategy.TriplePatternStrategy;
-import mvm.rya.api.query.strategy.wholerow.HashedPoWholeRowTriplePatternStrategy;
-import mvm.rya.api.query.strategy.wholerow.HashedSpoWholeRowTriplePatternStrategy;
-import mvm.rya.api.query.strategy.wholerow.OspWholeRowTriplePatternStrategy;
-import mvm.rya.api.query.strategy.wholerow.PoWholeRowTriplePatternStrategy;
-import mvm.rya.api.query.strategy.wholerow.SpoWholeRowTriplePatternStrategy;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolver;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import mvm.rya.api.resolver.triple.impl.WholeRowHashedTripleResolver;
-import mvm.rya.api.resolver.triple.impl.WholeRowTripleResolver;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Date: 7/16/12
- * Time: 12:04 PM
- */
-public class RyaTripleContext {
-
-    public Log logger = LogFactory.getLog(RyaTripleContext.class);
-    private TripleRowResolver tripleResolver;
-    private List<TriplePatternStrategy> triplePatternStrategyList = new ArrayList<TriplePatternStrategy>();
-
-    private RyaTripleContext(boolean addPrefixHash) {
-        addDefaultTriplePatternStrategies(addPrefixHash);
-        if (addPrefixHash){
-            tripleResolver = new WholeRowHashedTripleResolver();
-        }
-        else {
-            tripleResolver = new WholeRowTripleResolver();
-        }
-    }
-
-
-    private static class RyaTripleContextHolder {
-        // TODO want to be able to support more variability in configuration here
-        public static final RyaTripleContext INSTANCE = new RyaTripleContext(false);
-        public static final RyaTripleContext HASHED_INSTANCE = new RyaTripleContext(true);
-    }
-
-    public synchronized static RyaTripleContext getInstance(RdfCloudTripleStoreConfiguration conf) {
-        if (conf.isPrefixRowsWithHash()){
-            return RyaTripleContextHolder.HASHED_INSTANCE;
-        }
-        return RyaTripleContextHolder.INSTANCE;
-    }
-
-
-    public Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, TripleRow> serializeTriple(RyaStatement statement) throws TripleRowResolverException {
-        return getTripleResolver().serialize(statement);
-    }
-
-    public RyaStatement deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException {
-        return getTripleResolver().deserialize(table_layout, tripleRow);
-    }
-
-    protected void addDefaultTriplePatternStrategies(boolean addPrefixHash) {
-        if (addPrefixHash){
-            triplePatternStrategyList.add(new HashedSpoWholeRowTriplePatternStrategy());
-            triplePatternStrategyList.add(new HashedPoWholeRowTriplePatternStrategy());
-        }
-        else {
-            triplePatternStrategyList.add(new SpoWholeRowTriplePatternStrategy());
-            triplePatternStrategyList.add(new PoWholeRowTriplePatternStrategy());
-        }
-        triplePatternStrategyList.add(new OspWholeRowTriplePatternStrategy());
-    }
-
-    //retrieve triple pattern strategy
-    public TriplePatternStrategy retrieveStrategy(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
-        for (TriplePatternStrategy strategy : triplePatternStrategyList) {
-            if (strategy.handles(subject, predicate, object, context))
-                return strategy;
-        }
-        return null;
-    }
-
-    public TriplePatternStrategy retrieveStrategy(RyaStatement stmt) {
-        return retrieveStrategy(stmt.getSubject(), stmt.getPredicate(), stmt.getObject(),
stmt.getContext()); - } - - public TripleRowResolver getTripleResolver() { - return tripleResolver; - } - - public void setTripleResolver(TripleRowResolver tripleResolver) { - this.tripleResolver = tripleResolver; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java deleted file mode 100644 index 5b1cd20a5..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java +++ /dev/null @@ -1,60 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaTypeRange; -import org.openrdf.model.URI; - -/** - * Date: 7/16/12 - * Time: 12:08 PM - */ -public interface RyaTypeResolver { - public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException; - public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException; - - public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException; - - public RyaType newInstance(); - - /** - * @param bytes - * @return true if this byte[] is deserializable by this resolver - */ - public boolean deserializable(byte[] bytes); - - public URI getRyaDataType(); - - byte getMarkerByte(); - - /** - * This will allow a resolver to modify a range. For example, a date time resolver, with a reverse index, - * might want to reverse the start and stop - * - * @return - * @throws RyaTypeResolverException - */ - public RyaRange transformRange(RyaRange ryaRange) throws RyaTypeResolverException; - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java deleted file mode 100644 index 45f874c94..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java +++ /dev/null @@ -1,43 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Date: 7/16/12 - * Time: 12:09 PM - */ -public class RyaTypeResolverException extends Exception { - public RyaTypeResolverException() { - } - - public RyaTypeResolverException(String s) { - super(s); - } - - public RyaTypeResolverException(String s, Throwable throwable) { - super(s, throwable); - } - - public RyaTypeResolverException(Throwable throwable) { - super(throwable); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java deleted file mode 100644 index 0c7a30a62..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java +++ /dev/null @@ -1,57 +0,0 @@ -package mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.URI; - -/** - * Date: 7/16/12 - * Time: 12:11 PM - */ -public class RyaTypeResolverMapping { - - protected RyaTypeResolver ryaTypeResolver; - - public RyaTypeResolverMapping() { - } - - public RyaTypeResolverMapping(RyaTypeResolver ryaTypeResolver) { - this.ryaTypeResolver = ryaTypeResolver; - } - - public void setRyaTypeResolver(RyaTypeResolver ryaTypeResolver) { - this.ryaTypeResolver = ryaTypeResolver; - } - - public RyaTypeResolver getRyaTypeResolver() { - return ryaTypeResolver; - } - - public URI getRyaDataType() { - return ryaTypeResolver.getRyaDataType(); - } - - byte getMarkerByte() { - return ryaTypeResolver.getMarkerByte(); - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java deleted file mode 100644 index f5de2def2..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java +++ /dev/null @@ -1,61 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -public class BooleanRyaTypeResolver extends RyaTypeResolverImpl { - public static final int BOOLEAN_LITERAL_MARKER = 10; - public static final TypeEncoder BOOLEAN_TYPE_ENCODER = LexiTypeEncoders - .booleanEncoder(); - - public BooleanRyaTypeResolver() { - super((byte) BOOLEAN_LITERAL_MARKER, XMLSchema.BOOLEAN); - } - - @Override - protected String serializeData(String data) throws - RyaTypeResolverException { - try { - boolean value = Boolean.parseBoolean(data); - return BOOLEAN_TYPE_ENCODER.encode(value); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return BOOLEAN_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java deleted file mode 100644 index 4b806797f..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java +++ /dev/null @@ -1,63 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
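As a rough sketch of the resolver contract BooleanRyaTypeResolver implements above (not part of the patch; assumes RyaType's (datatype, data) constructor): serialize() appends the type delimiter and the marker byte 10, deserializable() checks for exactly that suffix, and deserialize() strips it back off.

    import mvm.rya.api.domain.RyaType;
    import mvm.rya.api.resolver.RyaTypeResolverException;
    import mvm.rya.api.resolver.impl.BooleanRyaTypeResolver;
    import org.openrdf.model.vocabulary.XMLSchema;

    public class BooleanRoundTrip {
        public static void main(String[] args) throws RyaTypeResolverException {
            BooleanRyaTypeResolver resolver = new BooleanRyaTypeResolver();
            RyaType in = new RyaType(XMLSchema.BOOLEAN, "true");
            byte[] bytes = resolver.serialize(in);  // encoded data + TYPE_DELIM + marker
            System.out.println(resolver.deserializable(bytes)); // true
            RyaType out = resolver.deserialize(bytes);
            System.out.println(out.getData());      // "true"
        }
    }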
- */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -public class ByteRyaTypeResolver extends RyaTypeResolverImpl { - public static final int LITERAL_MARKER = 9; - public static final TypeEncoder BYTE_STRING_TYPE_ENCODER = LexiTypeEncoders - .byteEncoder(); - - public ByteRyaTypeResolver() { - super((byte) LITERAL_MARKER, XMLSchema.BYTE); - } - - @Override - protected String serializeData(String data) throws RyaTypeResolverException { - try { - Byte value = Byte.parseByte(data); - return BYTE_STRING_TYPE_ENCODER.encode(value); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return BYTE_STRING_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java deleted file mode 100644 index ae93e0f3f..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java +++ /dev/null @@ -1,70 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import com.google.common.primitives.Bytes; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.openrdf.model.impl.URIImpl; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES; - -/** - * Date: 7/16/12 - * Time: 1:12 PM - */ -public class CustomDatatypeResolver extends RyaTypeResolverImpl { - public static final int DT_LITERAL_MARKER = 8; - - public CustomDatatypeResolver() { - super((byte) DT_LITERAL_MARKER, null); - } - - @Override - public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException { - byte[] bytes = serializeData(ryaType.getData()).getBytes(); - return new byte[][]{bytes, Bytes.concat(TYPE_DELIM_BYTES, ryaType.getDataType().stringValue().getBytes(), TYPE_DELIM_BYTES, markerBytes)}; - } - - @Override - public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException { - byte[][] bytes = serializeType(ryaType); - return Bytes.concat(bytes[0], bytes[1]); - } - - @Override - public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException { - if (!deserializable(bytes)) { - throw new RyaTypeResolverException("Bytes not deserializable"); - } - RyaType rt = newInstance(); - int length = bytes.length; - int indexOfType = Bytes.indexOf(bytes, TYPE_DELIM_BYTE); - if (indexOfType < 1) { - throw new RyaTypeResolverException("Not a datatype literal"); - } - String label = deserializeData(new String(bytes, 0, indexOfType)); - rt.setDataType(new URIImpl(new String(bytes, indexOfType + 1, (length - indexOfType) - 3))); - rt.setData(label); - return rt; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java deleted file mode 100644 index cb3e7cf26..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java +++ /dev/null @@ -1,76 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.util.Date; - -/** - * Reverse index xml datetime strings - *
- * Date: 7/13/12 - * Time: 7:33 AM - */ -public class DateTimeRyaTypeResolver extends RyaTypeResolverImpl { - public static final int DATETIME_LITERAL_MARKER = 7; - public static final TypeEncoder DATE_STRING_TYPE_ENCODER = LexiTypeEncoders.dateEncoder(); - public static final DateTimeFormatter XMLDATETIME_PARSER = org.joda.time.format.ISODateTimeFormat.dateTimeParser(); - public static final DateTimeFormatter UTC_XMLDATETIME_FORMATTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); - - - public DateTimeRyaTypeResolver() { - super((byte) DATETIME_LITERAL_MARKER, XMLSchema.DATETIME); - } - - @Override - protected String serializeData(String data) throws RyaTypeResolverException { - try { - DateTime dateTime = DateTime.parse(data, XMLDATETIME_PARSER); - Date value = dateTime.toDate(); - return DATE_STRING_TYPE_ENCODER.encode(value); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - Date date = DATE_STRING_TYPE_ENCODER.decode(value); - return UTC_XMLDATETIME_FORMATTER.print(date.getTime()); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java deleted file mode 100644 index 88daa0f0f..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java +++ /dev/null @@ -1,68 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
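A small self-contained sketch of the normalization DateTimeRyaTypeResolver performs above: any zoned xsd:dateTime is parsed with Joda-Time, and on the way back out it is reprinted as the equivalent UTC instant (the sample timestamp is illustrative):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;
    import org.joda.time.format.ISODateTimeFormat;

    public class DateTimeNormalizationSketch {
        public static void main(String[] args) {
            // Mirrors serializeData/deserializeData: parse any zone, print in UTC.
            DateTime parsed = DateTime.parse("2012-07-13T07:33:00.000-05:00",
                    ISODateTimeFormat.dateTimeParser());
            String utc = ISODateTimeFormat.dateTime()
                    .withZone(DateTimeZone.UTC)
                    .print(parsed.getMillis());
            System.out.println(utc); // 2012-07-13T12:33:00.000Z
        }
    }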
- */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.text.DecimalFormat; - -/** - * Date: 7/20/12 - * Time: 9:33 AM - */ -public class DoubleRyaTypeResolver extends RyaTypeResolverImpl { - public static final int DOUBLE_LITERAL_MARKER = 6; - public static final TypeEncoder DOUBLE_TYPE_ENCODER = LexiTypeEncoders.doubleEncoder(); - - public DoubleRyaTypeResolver() { - super((byte) DOUBLE_LITERAL_MARKER, XMLSchema.DOUBLE); - } - - @Override - protected String serializeData(String data) throws RyaTypeResolverException { - try { - double value = Double.parseDouble(data); - return DOUBLE_TYPE_ENCODER.encode(value); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return DOUBLE_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java deleted file mode 100644 index 2969a4b59..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java +++ /dev/null @@ -1,64 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - */ -public class FloatRyaTypeResolver extends RyaTypeResolverImpl { - public static final int FLOAT_LITERAL_MARKER = 11; - public static final TypeEncoder FLOAT_TYPE_ENCODER = LexiTypeEncoders.floatEncoder(); - - public FloatRyaTypeResolver() { - super((byte) FLOAT_LITERAL_MARKER, XMLSchema.FLOAT); - } - - @Override - protected String serializeData(String data) throws RyaTypeResolverException { - try { - float value = Float.parseFloat(data); - return FLOAT_TYPE_ENCODER.encode(value); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return FLOAT_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java deleted file mode 100644 index 2f6c7271f..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java +++ /dev/null @@ -1,67 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Date: 7/20/12 - * Time: 10:13 AM - */ -public class IntegerRyaTypeResolver extends RyaTypeResolverImpl { - public static final int INTEGER_LITERAL_MARKER = 5; - public static final TypeEncoder INTEGER_STRING_TYPE_ENCODER = LexiTypeEncoders - .integerEncoder(); - - public IntegerRyaTypeResolver() { - super((byte) INTEGER_LITERAL_MARKER, XMLSchema.INTEGER); - } - - @Override - protected String serializeData(String data) throws - RyaTypeResolverException { - try { - return INTEGER_STRING_TYPE_ENCODER.encode(Integer.parseInt(data)); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return INTEGER_STRING_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java deleted file mode 100644 index e073495e2..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java +++ /dev/null @@ -1,68 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Date: 7/20/12 - * Time: 10:13 AM - */ -public class LongRyaTypeResolver extends RyaTypeResolverImpl { - public static final int LONG_LITERAL_MARKER = 4; - public static final TypeEncoder LONG_STRING_TYPE_ENCODER = LexiTypeEncoders - .longEncoder(); - - public LongRyaTypeResolver() { - super((byte) LONG_LITERAL_MARKER, XMLSchema.LONG); - } - - @Override - protected String serializeData(String data) throws - RyaTypeResolverException { - try { - return LONG_STRING_TYPE_ENCODER.encode(Long.parseLong(data)); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) - throws RyaTypeResolverException { - try { - return LONG_STRING_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java deleted file mode 100644 index 3f4d6b83a..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java +++ /dev/null @@ -1,124 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
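The reason these numeric resolvers go through LexiTypeEncoders rather than plain toString() is that the lexicographic encodings sort in numeric order, which is what makes range scans over the index work. A sketch under the TypeEncoder<Long, String> signature implied by the calls above:

    import org.calrissian.mango.types.LexiTypeEncoders;
    import org.calrissian.mango.types.TypeEncoder;
    import org.calrissian.mango.types.exception.TypeEncodingException;

    public class LexiOrderingSketch {
        public static void main(String[] args) throws TypeEncodingException {
            TypeEncoder<Long, String> encoder = LexiTypeEncoders.longEncoder();
            String two = encoder.encode(2L);
            String ten = encoder.encode(10L);
            // Plain decimal strings compare "10" < "2"; the lexicographic
            // encoding preserves the numeric order instead.
            System.out.println(two.compareTo(ten) < 0); // true
        }
    }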
- */ - - - -import com.google.common.primitives.Bytes; -import mvm.rya.api.domain.RyaRange; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.resolver.RyaTypeResolver; -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.XMLSchema; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES; - -/** - * Date: 7/16/12 - * Time: 12:42 PM - */ -public class RyaTypeResolverImpl implements RyaTypeResolver { - public static final int PLAIN_LITERAL_MARKER = 3; - public static final TypeEncoder STRING_TYPE_ENCODER = LexiTypeEncoders - .stringEncoder(); - - protected byte markerByte; - protected URI dataType; - protected byte[] markerBytes; - - public RyaTypeResolverImpl() { - this((byte) PLAIN_LITERAL_MARKER, XMLSchema.STRING); - } - - public RyaTypeResolverImpl(byte markerByte, URI dataType) { - setMarkerByte(markerByte); - setRyaDataType(dataType); - } - - public void setMarkerByte(byte markerByte) { - this.markerByte = markerByte; - this.markerBytes = new byte[]{markerByte}; - } - - @Override - public byte getMarkerByte() { - return markerByte; - } - - @Override - public RyaRange transformRange(RyaRange ryaRange) throws RyaTypeResolverException { - return ryaRange; - } - - @Override - public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException { - byte[][] bytes = serializeType(ryaType); - return Bytes.concat(bytes[0], bytes[1]); - } - - @Override - public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException { - byte[] bytes = serializeData(ryaType.getData()).getBytes(); - return new byte[][]{bytes, Bytes.concat(TYPE_DELIM_BYTES, markerBytes)}; - } - - @Override - public URI getRyaDataType() { - return dataType; - } - - public void setRyaDataType(URI dataType) { - this.dataType = dataType; - } - - @Override - public RyaType newInstance() { - return new RyaType(); - } - - @Override - public boolean deserializable(byte[] bytes) { - return bytes != null && bytes.length >= 2 && bytes[bytes.length - 1] == getMarkerByte() && bytes[bytes.length - 2] == TYPE_DELIM_BYTE; - } - - protected String serializeData(String data) throws RyaTypeResolverException { - return STRING_TYPE_ENCODER.encode(data); - } - - @Override - public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException { - if (!deserializable(bytes)) { - throw new RyaTypeResolverException("Bytes not deserializable"); - } - RyaType rt = newInstance(); - rt.setDataType(getRyaDataType()); - String data = new String(bytes, 0, bytes.length - 2); - rt.setData(deserializeData(data)); - return rt; - } - - protected String deserializeData(String data) throws RyaTypeResolverException { - return STRING_TYPE_ENCODER.decode(data); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java deleted file mode 100644 index 8f8bf008e..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java +++ /dev/null @@ -1,44 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import org.openrdf.model.vocabulary.XMLSchema;
-
-/**
- * Date: 7/16/12
- * Time: 12:41 PM
- */
-public class RyaURIResolver extends RyaTypeResolverImpl {
-
-    public static final int URI_MARKER = 2;
-
-    public RyaURIResolver() {
-        super((byte) URI_MARKER, XMLSchema.ANYURI);
-    }
-
-    @Override
-    public RyaType newInstance() {
-        return new RyaURI();
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
deleted file mode 100644
index ce3f05b1b..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package mvm.rya.api.resolver.impl;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import mvm.rya.api.resolver.RyaTypeResolver;
-import mvm.rya.api.resolver.RyaTypeResolverMapping;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.ServiceLoader;
-
-/**
- * Date: 8/29/12
- * Time: 2:04 PM
- */
-public class ServiceBackedRyaTypeResolverMappings {
-
-    public List<RyaTypeResolverMapping> getResolvers() {
-        ServiceLoader<RyaTypeResolver> loader = ServiceLoader.load(RyaTypeResolver.class);
-        List<RyaTypeResolverMapping> resolvers = new ArrayList<RyaTypeResolverMapping>();
-        for (RyaTypeResolver aLoader : loader) {
-            resolvers.add(new RyaTypeResolverMapping(aLoader));
-        }
-        return resolvers;
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
deleted file mode 100644
index dba9773ef..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package mvm.rya.api.resolver.impl;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.resolver.RyaTypeResolverException; -import org.calrissian.mango.types.LexiTypeEncoders; -import org.calrissian.mango.types.TypeEncoder; -import org.calrissian.mango.types.exception.TypeDecodingException; -import org.calrissian.mango.types.exception.TypeEncodingException; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - */ -public class ShortRyaTypeResolver extends RyaTypeResolverImpl { - public static final int INTEGER_LITERAL_MARKER = 12; - public static final TypeEncoder INTEGER_STRING_TYPE_ENCODER = LexiTypeEncoders - .integerEncoder(); - - public ShortRyaTypeResolver() { - super((byte) INTEGER_LITERAL_MARKER, XMLSchema.SHORT); - } - - @Override - protected String serializeData(String data) throws - RyaTypeResolverException { - try { - return INTEGER_STRING_TYPE_ENCODER.encode(Integer.parseInt(data)); - } catch (NumberFormatException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } catch (TypeEncodingException e) { - throw new RyaTypeResolverException( - "Exception occurred serializing data[" + data + "]", e); - } - } - - @Override - protected String deserializeData(String value) throws RyaTypeResolverException { - try { - return INTEGER_STRING_TYPE_ENCODER.decode(value).toString(); - } catch (TypeDecodingException e) { - throw new RyaTypeResolverException( - "Exception occurred deserializing data[" + value + "]", e); - } - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java deleted file mode 100644 index f825e861f..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java +++ /dev/null @@ -1,107 +0,0 @@ -package mvm.rya.api.resolver.triple; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
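Pulling RyaTypeResolverImpl's pieces together: the stored form of a literal is encoded-data + TYPE_DELIM + marker byte, which is exactly the suffix deserializable() tests at the tail of the array. A self-contained sketch of that framing, with an illustrative stand-in for the real delimiter constant:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class MarkerFramingSketch {
        // Stand-in; the real value comes from RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE.
        static final byte TYPE_DELIM = 1;

        static byte[] frame(String encodedData, byte marker) {
            byte[] data = encodedData.getBytes(StandardCharsets.UTF_8);
            byte[] out = Arrays.copyOf(data, data.length + 2);
            out[out.length - 2] = TYPE_DELIM; // delimiter sits just before the marker
            out[out.length - 1] = marker;     // e.g. 3 = plain literal, 2 = URI
            return out;
        }

        static String unframe(byte[] bytes) {
            // Mirrors deserializable(): last byte is the marker, the byte before it
            // must be the delimiter, and everything else is the encoded data.
            return new String(bytes, 0, bytes.length - 2, StandardCharsets.UTF_8);
        }
    }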
- */ - - - -import java.util.Arrays; - -/** - * Date: 7/13/12 - * Time: 8:54 AM - */ -public class TripleRow { - private byte[] row, columnFamily, columnQualifier, columnVisibility, value; - private Long timestamp; - - public TripleRow(byte[] row, byte[] columnFamily, byte[] columnQualifier) { - this(row, columnFamily, columnQualifier, null, null, null); - } - public TripleRow(byte[] row, byte[] columnFamily, byte[] columnQualifier, Long timestamp, - byte[] columnVisibility, byte[] value) { - this.row = row; - this.columnFamily = columnFamily; - this.columnQualifier = columnQualifier; - //Default TS to current time to ensure the timestamps on all the tables are the same for the same triple - this.timestamp = timestamp != null ? timestamp : System.currentTimeMillis(); - this.columnVisibility = columnVisibility; - this.value = value; - } - - public byte[] getRow() { - return row; - } - - public byte[] getColumnFamily() { - return columnFamily; - } - - public byte[] getColumnQualifier() { - return columnQualifier; - } - - public byte[] getColumnVisibility() { - return columnVisibility; - } - - public byte[] getValue() { - return value; - } - - public Long getTimestamp() { - return timestamp; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TripleRow tripleRow = (TripleRow) o; - - if (!Arrays.equals(columnFamily, tripleRow.columnFamily)) return false; - if (!Arrays.equals(columnQualifier, tripleRow.columnQualifier)) return false; - if (!Arrays.equals(row, tripleRow.row)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = row != null ? Arrays.hashCode(row) : 0; - result = 31 * result + (columnFamily != null ? Arrays.hashCode(columnFamily) : 0); - result = 31 * result + (columnQualifier != null ? Arrays.hashCode(columnQualifier) : 0); - result = 31 * result + (columnVisibility != null ? Arrays.hashCode(columnVisibility) : 0); - result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return "TripleRow{" + - "row=" + row + - ", columnFamily=" + columnFamily + - ", columnQualifier=" + columnQualifier + - ", columnVisibility=" + columnVisibility + - ", value=" + value + - ", timestamp=" + timestamp + - '}'; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java deleted file mode 100644 index 36d23df9a..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java +++ /dev/null @@ -1,84 +0,0 @@ -package mvm.rya.api.resolver.triple; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.Arrays; - -/** - * Date: 7/13/12 - * Time: 8:54 AM - */ -public class TripleRowRegex { - private String row, columnFamily, columnQualifier; - - public TripleRowRegex(String row, String columnFamily, String columnQualifier) { - this.row = row; - this.columnFamily = columnFamily; - this.columnQualifier = columnQualifier; - } - - public String getRow() { - return row; - } - - public String getColumnFamily() { - return columnFamily; - } - - public String getColumnQualifier() { - return columnQualifier; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TripleRowRegex that = (TripleRowRegex) o; - - if (columnFamily != null ? !columnFamily.equals(that.columnFamily) : that.columnFamily != null) return false; - if (columnQualifier != null ? !columnQualifier.equals(that.columnQualifier) : that.columnQualifier != null) - return false; - if (row != null ? !row.equals(that.row) : that.row != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = row != null ? row.hashCode() : 0; - result = 31 * result + (columnFamily != null ? columnFamily.hashCode() : 0); - result = 31 * result + (columnQualifier != null ? columnQualifier.hashCode() : 0); - return result; - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append("TripleRowRegex"); - sb.append("{row='").append(row).append('\''); - sb.append(", columnFamily='").append(columnFamily).append('\''); - sb.append(", columnQualifier='").append(columnQualifier).append('\''); - sb.append('}'); - return sb.toString(); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java deleted file mode 100644 index 2ccc98686..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java +++ /dev/null @@ -1,43 +0,0 @@ -package mvm.rya.api.resolver.triple; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; - -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; - -/** - * Date: 7/17/12 - * Time: 7:33 AM - */ -public interface TripleRowResolver { - - public Map serialize(RyaStatement statement) throws TripleRowResolverException; - - public RyaStatement deserialize(TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException; - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java deleted file mode 100644 index d1824b19d..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java +++ /dev/null @@ -1,43 +0,0 @@ -package mvm.rya.api.resolver.triple; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Date: 7/17/12 - * Time: 7:35 AM - */ -public class TripleRowResolverException extends Exception { - public TripleRowResolverException() { - } - - public TripleRowResolverException(String s) { - super(s); - } - - public TripleRowResolverException(String s, Throwable throwable) { - super(s, throwable); - } - - public TripleRowResolverException(Throwable throwable) { - super(throwable); - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java deleted file mode 100644 index 1fd3f1b0e..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java +++ /dev/null @@ -1,154 +0,0 @@ -package mvm.rya.api.resolver.triple.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
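The TripleRowResolver contract shown above fans one RyaStatement out to one row per index table. A sketch (not part of the patch) of a caller that works against any of the implementations that follow:

    import java.util.Map;

    import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.resolver.triple.TripleRow;
    import mvm.rya.api.resolver.triple.TripleRowResolver;
    import mvm.rya.api.resolver.triple.TripleRowResolverException;

    public class FanOutSketch {
        static void printAllLayouts(TripleRowResolver resolver, RyaStatement stmt)
                throws TripleRowResolverException {
            Map<TABLE_LAYOUT, TripleRow> rows = resolver.serialize(stmt);
            for (Map.Entry<TABLE_LAYOUT, TripleRow> e : rows.entrySet()) {
                // One row each for the SPO, PO, and OSP tables.
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }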
- */
-
-
-
-import com.google.common.primitives.Bytes;
-
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTypeResolverException;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolver;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-
-/**
- * Will store triple in spo, po, osp. Storing everything in the whole row.
- * Date: 7/13/12
- * Time: 8:51 AM
- */
-public class WholeRowHashedTripleResolver implements TripleRowResolver {
-
-    @Override
-    public Map<TABLE_LAYOUT, TripleRow> serialize(RyaStatement stmt) throws TripleRowResolverException {
-        try {
-            RyaURI subject = stmt.getSubject();
-            RyaURI predicate = stmt.getPredicate();
-            RyaType object = stmt.getObject();
-            RyaURI context = stmt.getContext();
-            Long timestamp = stmt.getTimestamp();
-            byte[] columnVisibility = stmt.getColumnVisibility();
-            String qualifer = stmt.getQualifer();
-            byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes();
-            byte[] value = stmt.getValue();
-            assert subject != null && predicate != null && object != null;
-            byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes();
-            Map<TABLE_LAYOUT, TripleRow> tripleRowMap = new HashMap<TABLE_LAYOUT, TripleRow>();
-            MessageDigest md = MessageDigest.getInstance("MD5");
-            byte[] subjBytes = subject.getData().getBytes();
-            byte[] subjHashBytes = md.digest(subjBytes);
-            byte[] predBytes = predicate.getData().getBytes();
-            byte[] predHashBytes = md.digest(predBytes);
-            byte[][] objBytes = RyaContext.getInstance().serializeType(object);
-            tripleRowMap.put(TABLE_LAYOUT.SPO,
-                    new TripleRow(Bytes.concat(subjHashBytes, DELIM_BYTES, subjBytes, DELIM_BYTES,
-                            predBytes, DELIM_BYTES,
-                            objBytes[0], objBytes[1]), cf, qualBytes,
-                            timestamp, columnVisibility, value));
-            tripleRowMap.put(TABLE_LAYOUT.PO,
-                    new TripleRow(Bytes.concat(predHashBytes, DELIM_BYTES, predBytes, DELIM_BYTES,
-                            objBytes[0], DELIM_BYTES,
-                            subjBytes, objBytes[1]), cf, qualBytes,
-                            timestamp, columnVisibility, value));
-            tripleRowMap.put(TABLE_LAYOUT.OSP,
-                    new TripleRow(Bytes.concat(objBytes[0], DELIM_BYTES,
-                            subjBytes, DELIM_BYTES,
-                            predBytes, objBytes[1]), cf, qualBytes,
-                            timestamp, columnVisibility, value));
-            return tripleRowMap;
-        } catch (RyaTypeResolverException e) {
-            throw new TripleRowResolverException(e);
-        } catch (NoSuchAlgorithmException e) {
-            throw new TripleRowResolverException(e);
-        }
-    }
-
-    @Override
-    public RyaStatement deserialize(TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException {
-        try {
-            assert tripleRow != null && table_layout != null;
-            byte[] row = tripleRow.getRow();
-
-            // if it is a hashed row, only keep the row after the hash
-            if ((table_layout == TABLE_LAYOUT.SPO) || (table_layout == TABLE_LAYOUT.PO)) {
-                int hashStart = Bytes.indexOf(row, DELIM_BYTE);
-                row = Arrays.copyOfRange(row, hashStart + 1, row.length);
-            }
-
-            int firstIndex = Bytes.indexOf(row, DELIM_BYTE);
-            byte[] first = Arrays.copyOf(row, firstIndex);
-            int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE);
-            int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE);
-            byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex);
-            byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex);
byte[] type = Arrays.copyOfRange(row, typeIndex, row.length); - byte[] columnFamily = tripleRow.getColumnFamily(); - boolean contextExists = columnFamily != null && columnFamily.length > 0; - RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily))) : null; - byte[] columnQualifier = tripleRow.getColumnQualifier(); - String qualifier = columnQualifier != null && columnQualifier.length > 0 ? new String(columnQualifier) : null; - Long timestamp = tripleRow.getTimestamp(); - byte[] columnVisibility = tripleRow.getColumnVisibility(); - byte[] value = tripleRow.getValue(); - - switch (table_layout) { - case SPO: { - byte[] obj = Bytes.concat(third, type); - return new RyaStatement( - new RyaURI(new String(first)), - new RyaURI(new String(second)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - case PO: { - byte[] obj = Bytes.concat(second, type); - return new RyaStatement( - new RyaURI(new String(third)), - new RyaURI(new String(first)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - case OSP: { - byte[] obj = Bytes.concat(first, type); - return new RyaStatement( - new RyaURI(new String(second)), - new RyaURI(new String(third)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - } - } catch (RyaTypeResolverException e) { - throw new TripleRowResolverException(e); - } - throw new TripleRowResolverException("TripleRow[" + tripleRow + "] with Table layout[" + table_layout + "] is not deserializable"); - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java deleted file mode 100644 index dc0695bf3..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java +++ /dev/null @@ -1,139 +0,0 @@ -package mvm.rya.api.resolver.triple.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.primitives.Bytes; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolver; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.*; - -/** - * Will store triple in spo, po, osp. Storing everything in the whole row. 
- * Date: 7/13/12 - * Time: 8:51 AM - */ -public class WholeRowTripleResolver implements TripleRowResolver { - - @Override - public Map serialize(RyaStatement stmt) throws TripleRowResolverException { - try { - RyaURI subject = stmt.getSubject(); - RyaURI predicate = stmt.getPredicate(); - RyaType object = stmt.getObject(); - RyaURI context = stmt.getContext(); - Long timestamp = stmt.getTimestamp(); - byte[] columnVisibility = stmt.getColumnVisibility(); - String qualifer = stmt.getQualifer(); - byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes(); - byte[] value = stmt.getValue(); - assert subject != null && predicate != null && object != null; - byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes(); - Map tripleRowMap = new HashMap(); - byte[] subjBytes = subject.getData().getBytes(); - byte[] predBytes = predicate.getData().getBytes(); - byte[][] objBytes = RyaContext.getInstance().serializeType(object); - tripleRowMap.put(TABLE_LAYOUT.SPO, - new TripleRow(Bytes.concat(subjBytes, DELIM_BYTES, - predBytes, DELIM_BYTES, - objBytes[0], objBytes[1]), cf, qualBytes, - timestamp, columnVisibility, value)); - tripleRowMap.put(TABLE_LAYOUT.PO, - new TripleRow(Bytes.concat(predBytes, DELIM_BYTES, - objBytes[0], DELIM_BYTES, - subjBytes, objBytes[1]), cf, qualBytes, - timestamp, columnVisibility, value)); - tripleRowMap.put(TABLE_LAYOUT.OSP, - new TripleRow(Bytes.concat(objBytes[0], DELIM_BYTES, - subjBytes, DELIM_BYTES, - predBytes, objBytes[1]), cf, qualBytes, - timestamp, columnVisibility, value)); - return tripleRowMap; - } catch (RyaTypeResolverException e) { - throw new TripleRowResolverException(e); - } - } - - @Override - public RyaStatement deserialize(TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException { - try { - assert tripleRow != null && table_layout != null; - byte[] row = tripleRow.getRow(); - int firstIndex = Bytes.indexOf(row, DELIM_BYTE); - int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE); - int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE); - byte[] first = Arrays.copyOf(row, firstIndex); - byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex); - byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex); - byte[] type = Arrays.copyOfRange(row, typeIndex, row.length); - byte[] columnFamily = tripleRow.getColumnFamily(); - boolean contextExists = columnFamily != null && columnFamily.length > 0; - RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily))) : null; - byte[] columnQualifier = tripleRow.getColumnQualifier(); - String qualifier = columnQualifier != null && columnQualifier.length > 0 ? 
new String(columnQualifier) : null; - Long timestamp = tripleRow.getTimestamp(); - byte[] columnVisibility = tripleRow.getColumnVisibility(); - byte[] value = tripleRow.getValue(); - - switch (table_layout) { - case SPO: { - byte[] obj = Bytes.concat(third, type); - return new RyaStatement( - new RyaURI(new String(first)), - new RyaURI(new String(second)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - case PO: { - byte[] obj = Bytes.concat(second, type); - return new RyaStatement( - new RyaURI(new String(third)), - new RyaURI(new String(first)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - case OSP: { - byte[] obj = Bytes.concat(first, type); - return new RyaStatement( - new RyaURI(new String(second)), - new RyaURI(new String(third)), - RyaContext.getInstance().deserialize(obj), - context, qualifier, columnVisibility, value, timestamp); - } - } - } catch (RyaTypeResolverException e) { - throw new TripleRowResolverException(e); - } - throw new TripleRowResolverException("TripleRow[" + tripleRow + "] with Table layout[" + table_layout + "] is not deserializable"); - } - -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java b/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java deleted file mode 100644 index 61b14d95d..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -package mvm.rya.api.security; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import javax.servlet.http.HttpServletRequest; - -public interface SecurityProvider { - - public String[] getUserAuths(HttpServletRequest incRequest); -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java deleted file mode 100644 index f3e547916..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java +++ /dev/null @@ -1,76 +0,0 @@ -package mvm.rya.api.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
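Stepping back from the two resolvers removed above: each RyaStatement is written three times, with the row key reordered per table so that any bound prefix of a triple pattern becomes one contiguous scan, and the hashed variant additionally prefixes the row with an MD5 digest of the leading term to spread write hotspots. A self-contained sketch of the unhashed layouts, substituting a NUL byte for DELIM_BYTES and omitting the trailing type marker:

import com.google.common.primitives.Bytes;
import java.nio.charset.StandardCharsets;

// Sketch only: mirrors the SPO/PO/OSP row construction above, minus type bytes.
public class RowLayoutSketch {
    private static final byte[] DELIM = {0x00}; // stand-in for DELIM_BYTES

    public static void main(String[] args) {
        byte[] s = "urn:test#subj".getBytes(StandardCharsets.UTF_8);
        byte[] p = "urn:test#pred".getBytes(StandardCharsets.UTF_8);
        byte[] o = "urn:test#obj".getBytes(StandardCharsets.UTF_8);
        // One logical statement, three physical rows:
        byte[] spoRow = Bytes.concat(s, DELIM, p, DELIM, o); // SPO table
        byte[] poRow  = Bytes.concat(p, DELIM, o, DELIM, s); // PO table
        byte[] ospRow = Bytes.concat(o, DELIM, s, DELIM, p); // OSP table
        System.out.printf("spo=%d po=%d osp=%d bytes%n",
                spoRow.length, poRow.length, ospRow.length);
    }
}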
You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-import org.calrissian.mango.collect.CloseableIterable;
-
-/**
- * Date: 1/30/13
- * Time: 2:21 PM
- */
-public class CloseableIterableIteration<T, X extends Exception> implements CloseableIteration<T, X> {
-
-    private CloseableIterable<T> closeableIterable;
-    private final Iterator<T> iterator;
-
-    private boolean isClosed = false;
-
-    public CloseableIterableIteration(CloseableIterable<T> closeableIterable) {
-        this.closeableIterable = closeableIterable;
-        iterator = closeableIterable.iterator();
-    }
-
-    @Override
-    public void close() throws X {
-        try {
-            isClosed = true;
-            closeableIterable.close();
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws X {
-        return iterator.hasNext();
-    }
-
-    @Override
-    public T next() throws X {
-        if (!hasNext() || isClosed) {
-            throw new NoSuchElementException();
-        }
-
-        return iterator.next();
-    }
-
-    @Override
-    public void remove() throws X {
-        iterator.remove();
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java b/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
deleted file mode 100644
index b098e52f2..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package mvm.rya.api.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
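CloseableIterableIteration is the bridge that lets Mango-backed query results flow into the Sesame CloseableIteration machinery the Sail layer consumes; note that next() refuses to return elements once close() has run. The sibling wrappers deleted just below do the same for plain Iterator and Enumeration sources (EnumerationWrapper's remove() deserves a caution: Enumeration offers no removal, so that method silently skips an element instead). A minimal usage sketch, assuming these classes on the classpath with the generic signatures shown above:

import info.aduna.iteration.CloseableIteration;
import mvm.rya.api.utils.IteratorWrapper;

import java.util.Arrays;

// Sketch: adapt an in-memory iterator to the iteration type the Sail layer expects.
public class WrapperSketch {
    public static void main(String[] args) {
        CloseableIteration<String, RuntimeException> it =
                new IteratorWrapper<String, RuntimeException>(
                        Arrays.asList("urn:a", "urn:b").iterator());
        while (it.hasNext()) {
            System.out.println(it.next());
        }
        it.close(); // a no-op for a plain iterator, but keeps call sites uniform
    }
}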
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.util.Enumeration;
-
-/**
- * Date: 7/26/12
- * Time: 9:12 AM
- */
-public class EnumerationWrapper<E, X extends Exception> implements CloseableIteration<E, X> {
-    private Enumeration<E> enumeration;
-
-    public EnumerationWrapper(Enumeration<E> enumeration) {
-        this.enumeration = enumeration;
-    }
-
-    @Override
-    public void close() throws X {
-        //nothing
-    }
-
-    @Override
-    public boolean hasNext() throws X {
-        return enumeration.hasMoreElements();
-    }
-
-    @Override
-    public E next() throws X {
-        return enumeration.nextElement();
-    }
-
-    @Override
-    public void remove() throws X {
-        enumeration.nextElement();
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java b/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
deleted file mode 100644
index 86748f926..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package mvm.rya.api.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.util.Iterator;
-
-/**
- * Date: 7/26/12
- * Time: 9:12 AM
- */
-public class IteratorWrapper<E, X extends Exception> implements CloseableIteration<E, X> {
-    private Iterator<E> iterator;
-
-    public IteratorWrapper(Iterator<E> iterator) {
-        this.iterator = iterator;
-    }
-
-    @Override
-    public void close() throws X {
-        //nothing
-    }
-
-    @Override
-    public boolean hasNext() throws X {
-        return iterator.hasNext();
-    }
-
-    @Override
-    public E next() throws X {
-        return iterator.next();
-    }
-
-    @Override
-    public void remove() throws X {
-        iterator.remove();
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java b/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
deleted file mode 100644
index dfa17e86a..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package mvm.rya.api.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; - -/** - * Class NullableStatementImpl - * Date: Feb 23, 2011 - * Time: 10:37:34 AM - */ -public class NullableStatementImpl implements Statement { - - private Resource subject; - private URI predicate; - private Value object; - private Resource[] contexts; - - public NullableStatementImpl(Resource subject, URI predicate, Value object, Resource... contexts) { - this.subject = subject; - this.predicate = predicate; - this.object = object; - this.contexts = contexts; - } - - @Override - public int hashCode() { - return 961 * ((this.getSubject() == null) ? (0) : (this.getSubject().hashCode())) + - 31 * ((this.getPredicate() == null) ? (0) : (this.getPredicate().hashCode())) + - ((this.getObject() == null) ? (0) : (this.getObject().hashCode())); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(256); - sb.append("("); - sb.append(getSubject()); - sb.append(", "); - sb.append(getPredicate()); - sb.append(", "); - sb.append(getObject()); - sb.append(")"); - return sb.toString(); - } - - @Override - public boolean equals(Object other) { - if (this == other) - return true; - if (other instanceof Statement) { - Statement otherSt = (Statement) other; - return this.hashCode() == otherSt.hashCode(); - } else { - return false; - } - } - - public Value getObject() { - return object; - } - - public URI getPredicate() { - return predicate; - } - - public Resource getSubject() { - return subject; - } - - public Resource getContext() { - if (contexts == null || contexts.length == 0) - return null; - else return contexts[0]; - } - - public Resource[] getContexts() { - return contexts; - } - - public void setContexts(Resource[] contexts) { - this.contexts = contexts; - } -} diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java deleted file mode 100644 index 297c95036..000000000 --- a/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java +++ /dev/null @@ -1,74 +0,0 @@ -package mvm.rya.api.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
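A caution on the NullableStatementImpl just removed: its equals() declares two statements equal whenever their hash codes agree, and the hash covers only subject, predicate, and object, so distinct statements can collide and contexts are never compared. Where real value equality is needed, a stricter check along these lines (a sketch, not part of the deleted API) avoids the collision risk:

import org.openrdf.model.Statement;

import java.util.Objects;

// Hypothetical helper: compares by value instead of by hash code; contexts
// are still ignored, matching the original class's intent.
public final class StatementEquality {
    private StatementEquality() {}

    public static boolean sameSpo(Statement a, Statement b) {
        return Objects.equals(a.getSubject(), b.getSubject())
                && Objects.equals(a.getPredicate(), b.getPredicate())
                && Objects.equals(a.getObject(), b.getObject());
    }
}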
- */
-
-
-
-import com.google.common.base.Preconditions;
-import info.aduna.iteration.CloseableIteration;
-
-/**
- * Date: 7/24/12
- * Time: 4:40 PM
- */
-public class PeekingCloseableIteration<E, X extends Exception> implements CloseableIteration<E, X> {
-
-    private final CloseableIteration<E, X> iteration;
-    private boolean hasPeeked;
-    private E peekedElement;
-
-    public PeekingCloseableIteration(CloseableIteration<E, X> iteration) {
-        this.iteration = Preconditions.checkNotNull(iteration);
-    }
-
-    @Override
-    public void close() throws X {
-        iteration.close();
-    }
-
-    public boolean hasNext() throws X {
-        return hasPeeked || iteration.hasNext();
-    }
-
-    public E next() throws X {
-        if (!hasPeeked) {
-            return iteration.next();
-        } else {
-            E result = peekedElement;
-            hasPeeked = false;
-            peekedElement = null;
-            return result;
-        }
-    }
-
-    public void remove() throws X {
-        Preconditions.checkState(!hasPeeked, "Can't remove after you've peeked at next");
-        iteration.remove();
-    }
-
-    public E peek() throws X {
-        if (!hasPeeked) {
-            peekedElement = iteration.next();
-            hasPeeked = true;
-        }
-        return peekedElement;
-    }
-
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
deleted file mode 100644
index 0fc2a7fc1..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package mvm.rya.api.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import com.google.common.base.Function;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import org.openrdf.query.BindingSet;
-
-import java.util.Map;
-
-/**
- * Date: 1/18/13
- * Time: 1:25 PM
- */
-public class RyaStatementAddBindingSetFunction implements Function<RyaStatement, Map.Entry<RyaStatement, BindingSet>> {
-    @Override
-    public Map.Entry<RyaStatement, BindingSet> apply(RyaStatement ryaStatement) {
-        return new RdfCloudTripleStoreUtils.CustomEntry<RyaStatement, BindingSet>(ryaStatement, null);
-    }
-}
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
deleted file mode 100644
index b39fafe66..000000000
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package mvm.rya.api.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.
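PeekingCloseableIteration gives Sesame iterations the same look-ahead Guava's PeekingIterator gives plain iterators: peek() buffers one element, next() drains the buffer first, and remove() is refused while a peek is outstanding. A hypothetical walk-through, reusing the IteratorWrapper deleted above:

import mvm.rya.api.utils.IteratorWrapper;
import mvm.rya.api.utils.PeekingCloseableIteration;

import java.util.Arrays;

// Sketch: look ahead without consuming, e.g. to group DAO results by subject.
public class PeekSketch {
    public static void main(String[] args) {
        PeekingCloseableIteration<String, RuntimeException> it =
                new PeekingCloseableIteration<String, RuntimeException>(
                        new IteratorWrapper<String, RuntimeException>(
                                Arrays.asList("s1", "s1", "s2").iterator()));
        while (it.hasNext()) {
            String head = it.peek();  // inspect without advancing
            String taken = it.next(); // hands back the same element
            System.out.println(head.equals(taken)); // always true
        }
        it.close();
    }
}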
The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import org.openrdf.query.BindingSet;
-
-import java.util.Map;
-
-/**
- * Date: 1/18/13
- * Time: 1:22 PM
- */
-public class RyaStatementRemoveBindingSetCloseableIteration implements CloseableIteration<RyaStatement, RyaDAOException> {
-
-    private CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> iter;
-
-    public RyaStatementRemoveBindingSetCloseableIteration(CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> iter) {
-        this.iter = iter;
-    }
-
-    @Override
-    public void close() throws RyaDAOException {
-        iter.close();
-    }
-
-    @Override
-    public boolean hasNext() throws RyaDAOException {
-        return iter.hasNext();
-    }
-
-    @Override
-    public RyaStatement next() throws RyaDAOException {
-        return iter.next().getKey();
-    }
-
-    @Override
-    public void remove() throws RyaDAOException {
-    }
-}
diff --git a/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java b/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
deleted file mode 100644
index 396667923..000000000
--- a/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package mvm.rya.api.domain;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import junit.framework.TestCase;
-
-/**
- * Date: 7/24/12
- * Time: 3:30 PM
- */
-public class RyaURIPrefixTest extends TestCase {
-
-    public void testPrefix() throws Exception {
-        String prefix = "urn:test#";
-        RyaURIPrefix uriPrefix = new RyaURIPrefix(prefix);
-        assertEquals(prefix, uriPrefix.getPrefix());
-    }
-}
diff --git a/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java b/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
deleted file mode 100644
index 40a9c689c..000000000
--- a/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package mvm.rya.api.persist.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.
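These last two utils are inverses used around the query engine: RyaStatementAddBindingSetFunction lifts a bare RyaStatement into a (statement, binding set) entry with a null binding set, and RyaStatementRemoveBindingSetCloseableIteration projects such entries back down to statements, with remove() intentionally a no-op. A lifting sketch with Guava (illustrative wiring, not the deleted call sites):

import com.google.common.collect.Iterators;
import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.api.utils.RyaStatementAddBindingSetFunction;
import org.openrdf.query.BindingSet;

import java.util.Collections;
import java.util.Iterator;
import java.util.Map;

// Sketch: lift plain statements into entry form before joining against binding sets.
public class BindingSetLiftSketch {
    public static void main(String[] args) {
        RyaStatement stmt = new RyaStatement(
                new RyaURI("urn:test#s"), new RyaURI("urn:test#p"), new RyaURI("urn:test#o"));
        Iterator<Map.Entry<RyaStatement, BindingSet>> lifted = Iterators.transform(
                Collections.singletonList(stmt).iterator(),
                new RyaStatementAddBindingSetFunction());
        // The entry carries the same statement reference; its value is null.
        System.out.println(lifted.next().getKey() == stmt); // true
    }
}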
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import org.junit.Test; - -import java.util.Arrays; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -/** - */ -public class RyaQueryTest { - - @Test - public void testBuildQueryWithOptions() { - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaStatement ryaStatement = new RyaStatement(subj, pred, obj); - String[] auths = {"U,FOUO"}; - long currentTime = System.currentTimeMillis(); - RyaQuery ryaQuery = RyaQuery.builder(ryaStatement).setAuths(auths).setNumQueryThreads(4).setRegexObject("regexObj") - .setRegexPredicate("regexPred").setRegexSubject("regexSubj").setTtl(100l).setBatchSize(10). - setCurrentTime(currentTime).setMaxResults(1000l) - .build(); - - assertNotNull(ryaQuery); - assertEquals(ryaStatement, ryaQuery.getQuery()); - assertEquals(4, (int) ryaQuery.getNumQueryThreads()); - assertEquals("regexObj", ryaQuery.getRegexObject()); - assertEquals("regexPred", ryaQuery.getRegexPredicate()); - assertEquals("regexSubj", ryaQuery.getRegexSubject()); - assertEquals(100l, (long) ryaQuery.getTtl()); - assertEquals(10, (int) ryaQuery.getBatchSize()); - assertEquals(currentTime, (long) ryaQuery.getCurrentTime()); - assertEquals(1000l, (long) ryaQuery.getMaxResults()); - assertTrue(Arrays.equals(auths, ryaQuery.getAuths())); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java deleted file mode 100644 index 919e4cc1c..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java +++ /dev/null @@ -1,192 +0,0 @@ -package mvm.rya.api.query.strategy; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO; - -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.query.strategy.wholerow.OspWholeRowTriplePatternStrategy; -import mvm.rya.api.query.strategy.wholerow.PoWholeRowTriplePatternStrategy; -import mvm.rya.api.query.strategy.wholerow.SpoWholeRowTriplePatternStrategy; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowRegex; -import mvm.rya.api.resolver.triple.impl.WholeRowTripleResolver; - -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Date: 7/25/12 - * Time: 11:41 AM - */ -public class AbstractTriplePatternStrategyTest extends TestCase { - public class MockRdfConfiguration extends RdfCloudTripleStoreConfiguration { - - @Override - public RdfCloudTripleStoreConfiguration clone() { - return new MockRdfConfiguration(); - } - - } - - public void testRegex() throws Exception { - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaStatement ryaStatement = new RyaStatement(subj, pred, obj); - Map serialize = new WholeRowTripleResolver().serialize(ryaStatement); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - String row = new String(tripleRow.getRow()); - TriplePatternStrategy spoStrategy = new SpoWholeRowTriplePatternStrategy(); - TriplePatternStrategy poStrategy = new PoWholeRowTriplePatternStrategy(); - TriplePatternStrategy ospStrategy = new OspWholeRowTriplePatternStrategy(); - //pred - TripleRowRegex tripleRowRegex = spoStrategy.buildRegex(null, pred.getData(), null, null, null); - Pattern p = Pattern.compile(tripleRowRegex.getRow()); - Matcher matcher = p.matcher(row); - assertTrue(matcher.matches()); - //subj - tripleRowRegex = spoStrategy.buildRegex(subj.getData(), null, null, null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - //obj - tripleRowRegex = spoStrategy.buildRegex(null, null, obj.getData(), null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - - //po table - row = new String(serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO).getRow()); - tripleRowRegex = poStrategy.buildRegex(null, pred.getData(), null, null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - - tripleRowRegex = poStrategy.buildRegex(null, pred.getData(), obj.getData(), null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - - tripleRowRegex = poStrategy.buildRegex(subj.getData(), pred.getData(), obj.getData(), null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - - //various regex - tripleRowRegex = poStrategy.buildRegex(null, "urn:test#pr[e|d]{2}", null, null, null); - 
p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertTrue(matcher.matches()); - - //does not match - tripleRowRegex = poStrategy.buildRegex(null, "hello", null, null, null); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertFalse(matcher.matches()); - } - - public void testObjectTypeInfo() throws Exception { - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaType obj = new RyaType(XMLSchema.LONG, "10"); - RyaStatement ryaStatement = new RyaStatement(subj, pred, obj); - Map serialize = RyaTripleContext.getInstance(new MockRdfConfiguration()).serializeTriple(ryaStatement); - TripleRow tripleRow = serialize.get(SPO); - - String row = new String(tripleRow.getRow()); - TriplePatternStrategy spoStrategy = new SpoWholeRowTriplePatternStrategy(); - //obj - byte[][] bytes = RyaContext.getInstance().serializeType(obj); - String objStr = new String(bytes[0]); - byte[] objectTypeInfo = bytes[1]; - TripleRowRegex tripleRowRegex = spoStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - Pattern p = Pattern.compile(tripleRowRegex.getRow()); - Matcher matcher = p.matcher(row); - assertTrue(matcher.matches()); - - //build row with same object str data - Map dupTriple_str = RyaTripleContext.getInstance(new MockRdfConfiguration()).serializeTriple( - new RyaStatement(subj, pred, new RyaType(XMLSchema.STRING, objStr)) - ); - TripleRow tripleRow_dup_str = dupTriple_str.get(SPO); - - row = new String(tripleRow_dup_str.getRow()); - spoStrategy = new SpoWholeRowTriplePatternStrategy(); - - tripleRowRegex = spoStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(row); - assertFalse(matcher.matches()); - - //po table - TriplePatternStrategy poStrategy = new PoWholeRowTriplePatternStrategy(); - tripleRowRegex = poStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - p = Pattern.compile(tripleRowRegex.getRow()); - String po_row = new String(serialize.get(PO).getRow()); - matcher = p.matcher(po_row); - assertTrue(matcher.matches()); - - tripleRowRegex = poStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(new String(dupTriple_str.get(PO).getRow())); - assertFalse(matcher.matches()); - - //osp table - TriplePatternStrategy ospStrategy = new OspWholeRowTriplePatternStrategy(); - tripleRowRegex = ospStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - p = Pattern.compile(tripleRowRegex.getRow()); - String osp_row = new String(serialize.get(OSP).getRow()); - matcher = p.matcher(osp_row); - assertTrue(matcher.matches()); - - tripleRowRegex = ospStrategy.buildRegex(null, null, - objStr - , null, objectTypeInfo); - p = Pattern.compile(tripleRowRegex.getRow()); - matcher = p.matcher(new String(dupTriple_str.get(OSP).getRow())); - assertFalse(matcher.matches()); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java deleted file mode 100644 index 81e946848..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java +++ /dev/null @@ -1,175 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) 
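The regex assertions above pin down the contract of buildRegex: components bound in the pattern appear literally in the row regex (and may themselves be regex fragments, as in "urn:test#pr[e|d]{2}"), while unbound components become wildcards between delimiters. The idea in miniature, assuming a NUL delimiter and ignoring the type-byte handling the real strategies add:

import java.util.regex.Pattern;

// Sketch of the row-regex idea: fixed components are quoted literally,
// unbound ones become delimiter-bounded wildcards.
public class RowRegexSketch {
    private static final String DELIM = "\\x00";

    static Pattern spoRegex(String subj, String pred, String obj) {
        return Pattern.compile(
                part(subj) + DELIM + part(pred) + DELIM + part(obj) + ".*");
    }

    private static String part(String bound) {
        return bound == null ? "[^\\x00]*" : Pattern.quote(bound);
    }

    public static void main(String[] args) {
        String row = "urn:test#1234\u0000urn:test#pred\u0000urn:test#obj";
        System.out.println(spoRegex(null, "urn:test#pred", null).matcher(row).matches()); // true
        System.out.println(spoRegex(null, "urn:other", null).matcher(row).matches());     // false
    }
}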
under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.*; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; - -import org.apache.hadoop.io.Text; -import org.junit.Before; -import org.openrdf.model.impl.URIImpl; - -import java.util.Map; - -/** - * Date: 7/14/12 - * Time: 11:46 AM - */ -public class HashedPoWholeRowTriplePatternStrategyTest extends TestCase { - - RyaURI uri = new RyaURI("urn:test#1234"); - RyaURI uri2 = new RyaURI("urn:test#1235"); - RyaURIRange rangeURI = new RyaURIRange(uri, uri2); - RyaURIRange rangeURI2 = new RyaURIRange(new RyaURI("urn:test#1235"), new RyaURI("urn:test#1236")); - HashedPoWholeRowTriplePatternStrategy strategy = new HashedPoWholeRowTriplePatternStrategy(); - RyaContext ryaContext = RyaContext.getInstance(); - RyaTripleContext ryaTripleContext; - - RyaType customType1 = new RyaType(new URIImpl("urn:custom#type"), "1234"); - RyaType customType2 = new RyaType(new URIImpl("urn:custom#type"), "1235"); - RyaType customType3 = new RyaType(new URIImpl("urn:custom#type"), "1236"); - RyaTypeRange customTypeRange1 = new RyaTypeRange(customType1, customType2); - RyaTypeRange customTypeRange2 = new RyaTypeRange(customType2, customType3); - - @Before - public void setUp() { - MockRdfCloudConfiguration config = new MockRdfCloudConfiguration(); - config.set(MockRdfCloudConfiguration.CONF_PREFIX_ROW_WITH_HASH, Boolean.TRUE.toString()); - ryaTripleContext = RyaTripleContext.getInstance(config); - } - - public void testPoRange() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - - Map.Entry entry = strategy.defineRange(null, uri, rangeURI, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(null, uri, rangeURI2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - private void assertContains(ByteRange value, byte[] row) { - Text rowText = new Text(row); - Text startText = new Text(value.getStart()); - Text endText = new Text(value.getEnd()); - assertTrue((startText.compareTo(rowText) <= 0) &&(endText.compareTo(rowText) >= 0)) ; - } - - private void assertContainsFalse(ByteRange value, byte[] row) { - Text rowText = new Text(row); - Text startText = new Text(value.getStart()); - Text endText = new Text(value.getEnd()); - assertFalse((startText.compareTo(rowText) <= 0) &&(endText.compareTo(rowText) >= 0)) ; - } - - public void testPoRangeCustomType() throws Exception { - Map serialize = 
ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, customType1, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - - Map.Entry entry = strategy.defineRange(null, uri, customTypeRange1, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(null, uri, customTypeRange2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testPo() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - - Map.Entry entry = strategy.defineRange(null, uri, uri, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(null, uri, uri2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testPoCustomType() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, customType1, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - - Map.Entry entry = strategy.defineRange(null, uri, customType1, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(null, uri, customType2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testPosRange() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - - Map.Entry entry = strategy.defineRange(rangeURI, uri, uri, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(rangeURI2, uri, uri, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testPRange() throws Exception { - Map.Entry entry = strategy.defineRange(null, rangeURI, null, null, null); - assertNull(entry); - } - - public void testP() throws Exception { - Map.Entry entry = strategy.defineRange(null, uri, null, null, null); - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - assertContains(entry.getValue(), tripleRow.getRow()); - } - - public void testHandles() throws Exception { - //po(ng) - assertTrue(strategy.handles(null, uri, uri, null)); - assertTrue(strategy.handles(null, uri, uri, uri)); - //po_r(s)(ng) - assertTrue(strategy.handles(rangeURI, uri, uri, null)); - assertTrue(strategy.handles(rangeURI, uri, uri, uri)); - //p(ng) - assertTrue(strategy.handles(null, uri, null, null)); - assertTrue(strategy.handles(null, uri, null, uri)); - //p_r(o)(ng) - assertTrue(strategy.handles(null, uri, rangeURI, null)); - assertTrue(strategy.handles(null, uri, rangeURI, uri)); - //r(p)(ng) - assertFalse(strategy.handles(null, rangeURI, null, null)); - assertFalse(strategy.handles(null, rangeURI, null, uri)); - - //false cases - //sp.. 
- assertFalse(strategy.handles(uri, uri, null, null)); - //r(s)_p - assertFalse(strategy.handles(rangeURI, uri, null, null)); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java deleted file mode 100644 index f7518666e..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java +++ /dev/null @@ -1,199 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -// -import java.util.Map; - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaTypeRange; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.domain.RyaURIRange; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; - -import org.apache.hadoop.io.Text; -import org.junit.Before; -import org.openrdf.model.impl.URIImpl; - -/** - * Date: 7/14/12 - * Time: 7:47 AM - */ -public class HashedSpoWholeRowTriplePatternStrategyTest extends TestCase { - - RyaURI uri = new RyaURI("urn:test#1234"); - RyaURI uri2 = new RyaURI("urn:test#1235"); - RyaURIRange rangeURI = new RyaURIRange(uri, uri2); - RyaURIRange rangeURI2 = new RyaURIRange(new RyaURI("urn:test#1235"), new RyaURI("urn:test#1236")); - HashedSpoWholeRowTriplePatternStrategy strategy = new HashedSpoWholeRowTriplePatternStrategy(); - RyaContext ryaContext = RyaContext.getInstance(); - RyaTripleContext ryaTripleContext; - - RyaType customType1 = new RyaType(new URIImpl("urn:custom#type"), "1234"); - RyaType customType2 = new RyaType(new URIImpl("urn:custom#type"), "1235"); - RyaType customType3 = new RyaType(new URIImpl("urn:custom#type"), "1236"); - RyaTypeRange customTypeRange1 = new RyaTypeRange(customType1, customType2); - RyaTypeRange customTypeRange2 = new RyaTypeRange(customType2, customType3); - - @Before - public void setUp() { - MockRdfCloudConfiguration config = new MockRdfCloudConfiguration(); - config.set(MockRdfCloudConfiguration.CONF_PREFIX_ROW_WITH_HASH, Boolean.TRUE.toString()); - ryaTripleContext = RyaTripleContext.getInstance(config); - } - - public void testSpo() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, uri, uri, null, null); - 
assertContains(entry.getValue(), tripleRow.getRow()); - - - entry = strategy.defineRange(uri, uri, uri2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - private void assertContains(ByteRange value, byte[] row) { - Text rowText = new Text(row); - Text startText = new Text(value.getStart()); - Text endText = new Text(value.getEnd()); - assertTrue((startText.compareTo(rowText) <= 0) &&(endText.compareTo(rowText) >= 0)) ; - } - - private void assertContainsFalse(ByteRange value, byte[] row) { - Text rowText = new Text(row); - Text startText = new Text(value.getStart()); - Text endText = new Text(value.getEnd()); - assertFalse((startText.compareTo(rowText) <= 0) &&(endText.compareTo(rowText) >= 0)) ; - } - - public void testSpoCustomType() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, customType1, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, uri, customType1, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(uri, uri, customType2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testSpoRange() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, uri, rangeURI, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(uri, uri, rangeURI2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testSpoRangeCustomType() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, customType1, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, uri, customTypeRange1, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(uri, uri, customTypeRange2, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testSp() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, uri, null, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - entry = strategy.defineRange(uri, uri2, null, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testSpRange() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, rangeURI, null, null, null); - assertContains(entry.getValue(), tripleRow.getRow()); - entry = strategy.defineRange(uri, rangeURI2, null, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testS() throws Exception { - Map serialize = ryaTripleContext.serializeTriple( - new RyaStatement(uri, uri, uri, null)); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - - Map.Entry entry = strategy.defineRange(uri, null, null, null, null); - 
assertContains(entry.getValue(), tripleRow.getRow()); - - entry = strategy.defineRange(uri2, null, null, null, null); - assertContainsFalse(entry.getValue(), tripleRow.getRow()); - } - - public void testSRange() throws Exception { - - Map.Entry entry = strategy.defineRange(rangeURI, null, null, null, null); - assertNull(entry); - } - - public void testHandles() throws Exception { - //spo(ng) - assertTrue(strategy.handles(uri, uri, uri, null)); - assertTrue(strategy.handles(uri, uri, uri, uri)); - //sp(ng) - assertTrue(strategy.handles(uri, uri, null, null)); - assertTrue(strategy.handles(uri, uri, null, uri)); - //s(ng) - assertTrue(strategy.handles(uri, null, null, null)); - assertTrue(strategy.handles(uri, null, null, uri)); - //sp_r(o)(ng) - assertTrue(strategy.handles(uri, uri, rangeURI, null)); - assertTrue(strategy.handles(uri, uri, rangeURI, uri)); - //s_r(p)(ng) - assertTrue(strategy.handles(uri, rangeURI, null, null)); - assertTrue(strategy.handles(uri, rangeURI, null, uri)); - - //fail - //s_r(p)_r(o) - assertFalse(strategy.handles(uri, rangeURI, rangeURI, null)); - - //s==null - assertFalse(strategy.handles(null, uri, uri, null)); - - //s_r(o) - assertFalse(strategy.handles(uri, null, rangeURI, null)); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java deleted file mode 100644 index ddb7fa8c5..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java +++ /dev/null @@ -1,32 +0,0 @@ -package mvm.rya.api.query.strategy.wholerow; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; - -public class MockRdfCloudConfiguration extends RdfCloudTripleStoreConfiguration { - - @Override - public RdfCloudTripleStoreConfiguration clone() { - return this; - } - -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java deleted file mode 100644 index 57b27bea9..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
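Both hashed strategy tests above also pin down the trade-off that testSRange and testPRange assert directly: prefixing rows with an MD5 digest balances load across tablets, but it destroys lexicographic adjacency, so a range over the hashed leading term cannot be turned into a single scan and defineRange returns null. A quick demonstration that neighboring URIs hash far apart:

import java.math.BigInteger;
import java.security.MessageDigest;

// Shows that adjacent URIs hash to unrelated prefixes, which is why the
// hashed strategies refuse range queries on the hashed component.
public class HashScatterSketch {
    public static void main(String[] args) throws Exception {
        MessageDigest md = MessageDigest.getInstance("MD5");
        for (String uri : new String[]{"urn:test#1234", "urn:test#1235", "urn:test#1236"}) {
            byte[] digest = md.digest(uri.getBytes("UTF-8"));
            System.out.printf("%s -> %032x%n", uri, new BigInteger(1, digest));
        }
    }
}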
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.rya.api.query.strategy.wholerow; - -// -//import junit.framework.TestCase; -//import mvm.rya.api.RdfCloudTripleStoreConstants; -//import mvm.rya.api.domain.*; -//import mvm.rya.api.resolver.RyaContext; -//import mvm.rya.api.resolver.triple.TripleRow; -//import org.apache.accumulo.core.data.Key; -//import org.apache.accumulo.core.data.Range; -//import org.apache.hadoop.io.Text; -//import org.openrdf.model.impl.URIImpl; -// -//import java.util.Map; -// -///** -// * Date: 7/14/12 -// * Time: 11:46 AM -// */ -//public class OspWholeRowTriplePatternStrategyTest extends TestCase { -// RyaURI uri = new RyaURI("urn:test#1234"); -// RyaURI uri2 = new RyaURI("urn:test#1235"); -// RyaURIRange rangeURI = new RyaURIRange(uri, uri2); -// RyaURIRange rangeURI2 = new RyaURIRange(new RyaURI("urn:test#1235"), new RyaURI("urn:test#1236")); -// -// RyaType customType1 = new RyaType(new URIImpl("urn:custom#type"), "1234"); -// RyaType customType2 = new RyaType(new URIImpl("urn:custom#type"), "1235"); -// RyaType customType3 = new RyaType(new URIImpl("urn:custom#type"), "1236"); -// RyaTypeRange customTypeRange1 = new RyaTypeRange(customType1, customType2); -// RyaTypeRange customTypeRange2 = new RyaTypeRange(customType2, customType3); -// -// OspWholeRowTriplePatternStrategy strategy = new OspWholeRowTriplePatternStrategy(); -// RyaContext ryaContext = RyaContext.getInstance(); -// -// public void testO() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); -// Key key = new Key(new Text(tripleRow.getRow())); -// Map.Entry entry = strategy.defineRange(null, null, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, null, uri2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testORange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(null, null, rangeURI, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, null, rangeURI2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testOs() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, null, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri2, null, uri, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testOsRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, 
uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(rangeURI, null, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(rangeURI2, null, uri, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testOsRangeCustomType() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, customType1, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(rangeURI, null, customType1, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(rangeURI2, null, customType2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testHandles() throws Exception { -// //os(ng) -// assertTrue(strategy.handles(uri, null, uri, null)); -// assertTrue(strategy.handles(uri, null, uri, uri)); -// //o_r(s)(ng) -// assertTrue(strategy.handles(rangeURI, null, uri, null)); -// assertTrue(strategy.handles(rangeURI, null, uri, uri)); -// //o(ng) -// assertTrue(strategy.handles(null, null, uri, null)); -// assertTrue(strategy.handles(null, null, uri, uri)); -// //r(o) -// assertTrue(strategy.handles(null, null, rangeURI, null)); -// assertTrue(strategy.handles(null, null, rangeURI, uri)); -// -// //false -// assertFalse(strategy.handles(uri, null, rangeURI, null)); -// } -//} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java deleted file mode 100644 index 1079bf8de..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.rya.api.query.strategy.wholerow; - -// -//import junit.framework.TestCase; -//import mvm.rya.api.RdfCloudTripleStoreConstants; -//import mvm.rya.api.domain.*; -//import mvm.rya.api.resolver.RyaContext; -//import mvm.rya.api.resolver.triple.TripleRow; -//import org.apache.accumulo.core.data.Key; -//import org.apache.accumulo.core.data.Range; -//import org.apache.hadoop.io.Text; -//import org.openrdf.model.impl.URIImpl; -// -//import java.util.Map; -// -///** -// * Date: 7/14/12 -// * Time: 11:46 AM -// */ -//public class PoWholeRowTriplePatternStrategyTest extends TestCase { -// -// RyaURI uri = new RyaURI("urn:test#1234"); -// RyaURI uri2 = new RyaURI("urn:test#1235"); -// RyaURIRange rangeURI = new RyaURIRange(uri, uri2); -// RyaURIRange rangeURI2 = new RyaURIRange(new RyaURI("urn:test#1235"), new RyaURI("urn:test#1236")); -// PoWholeRowTriplePatternStrategy strategy = new PoWholeRowTriplePatternStrategy(); -// RyaContext ryaContext = RyaContext.getInstance(); -// -// RyaType customType1 = new RyaType(new URIImpl("urn:custom#type"), "1234"); -// RyaType customType2 = new RyaType(new URIImpl("urn:custom#type"), "1235"); -// RyaType customType3 = new RyaType(new URIImpl("urn:custom#type"), "1236"); -// RyaTypeRange customTypeRange1 = new RyaTypeRange(customType1, customType2); -// RyaTypeRange customTypeRange2 = new RyaTypeRange(customType2, customType3); -// -// public void testPoRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(null, uri, rangeURI, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, uri, rangeURI2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testPoRangeCustomType() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, customType1, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(null, uri, customTypeRange1, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, uri, customTypeRange2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testPo() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(null, uri, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, uri, uri2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testPoCustomType() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, customType1, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(null, uri, customType1, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(null, uri, customType2, null, null); -// 
assertFalse(entry.getValue().contains(key)); -// } -// -// public void testPosRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(rangeURI, uri, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(rangeURI2, uri, uri, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testPRange() throws Exception { -// Map.Entry entry = strategy.defineRange(null, rangeURI, null, null, null); -// Map serialize = ryaContext.serializeTriple(new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// assertTrue(entry.getValue().contains(key)); -// } -// -// public void testP() throws Exception { -// Map.Entry entry = strategy.defineRange(null, uri, null, null, null); -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); -// Key key = new Key(new Text(tripleRow.getRow())); -// assertTrue(entry.getValue().contains(key)); -// } -// -// public void testHandles() throws Exception { -// //po(ng) -// assertTrue(strategy.handles(null, uri, uri, null)); -// assertTrue(strategy.handles(null, uri, uri, uri)); -// //po_r(s)(ng) -// assertTrue(strategy.handles(rangeURI, uri, uri, null)); -// assertTrue(strategy.handles(rangeURI, uri, uri, uri)); -// //p(ng) -// assertTrue(strategy.handles(null, uri, null, null)); -// assertTrue(strategy.handles(null, uri, null, uri)); -// //p_r(o)(ng) -// assertTrue(strategy.handles(null, uri, rangeURI, null)); -// assertTrue(strategy.handles(null, uri, rangeURI, uri)); -// //r(p)(ng) -// assertTrue(strategy.handles(null, rangeURI, null, null)); -// assertTrue(strategy.handles(null, rangeURI, null, uri)); -// -// //false cases -// //sp.. -// assertFalse(strategy.handles(uri, uri, null, null)); -// //r(s)_p -// assertFalse(strategy.handles(rangeURI, uri, null, null)); -// } -//} diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java deleted file mode 100644 index 019a3aa36..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
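
Taken together, the testHandles truth tables encode a dispatch rule: each strategy volunteers only for the subject/predicate/object binding shapes its table layout can answer with a single contiguous scan. A sketch of how a planner might use that rule; the picker class is illustrative and the parameter types are inferred from the handles() calls above, not shown elsewhere in this patch:

import java.util.Arrays;
import java.util.List;

import mvm.rya.api.domain.RyaType;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.api.query.strategy.TriplePatternStrategy;
import mvm.rya.api.query.strategy.wholerow.OspWholeRowTriplePatternStrategy;
import mvm.rya.api.query.strategy.wholerow.PoWholeRowTriplePatternStrategy;
import mvm.rya.api.query.strategy.wholerow.SpoWholeRowTriplePatternStrategy;

public class StrategyPickerSketch {
    private static final List<TriplePatternStrategy> STRATEGIES = Arrays.asList(
            new SpoWholeRowTriplePatternStrategy(),
            new PoWholeRowTriplePatternStrategy(),
            new OspWholeRowTriplePatternStrategy());

    // First strategy whose handles() accepts the binding shape wins.
    public static TriplePatternStrategy pick(RyaURI s, RyaURI p, RyaType o, RyaURI context) {
        for (TriplePatternStrategy strategy : STRATEGIES) {
            if (strategy.handles(s, p, o, context)) {
                return strategy;
            }
        }
        return null; // no single-scan layout serves this pattern
    }
}
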
- */ - -//package mvm.rya.api.query.strategy.wholerow; - -// -//import junit.framework.TestCase; -//import mvm.rya.api.RdfCloudTripleStoreConstants; -//import mvm.rya.api.domain.*; -//import mvm.rya.api.resolver.RyaContext; -//import mvm.rya.api.resolver.triple.TripleRow; -//import org.apache.accumulo.core.data.Key; -//import org.apache.accumulo.core.data.Range; -//import org.apache.hadoop.io.Text; -//import org.openrdf.model.impl.URIImpl; -// -//import java.util.Map; -// -///** -// * Date: 7/14/12 -// * Time: 7:47 AM -// */ -//public class SpoWholeRowTriplePatternStrategyTest extends TestCase { -// -// RyaURI uri = new RyaURI("urn:test#1234"); -// RyaURI uri2 = new RyaURI("urn:test#1235"); -// RyaURIRange rangeURI = new RyaURIRange(uri, uri2); -// RyaURIRange rangeURI2 = new RyaURIRange(new RyaURI("urn:test#1235"), new RyaURI("urn:test#1236")); -// SpoWholeRowTriplePatternStrategy strategy = new SpoWholeRowTriplePatternStrategy(); -// RyaContext ryaContext = RyaContext.getInstance(); -// -// RyaType customType1 = new RyaType(new URIImpl("urn:custom#type"), "1234"); -// RyaType customType2 = new RyaType(new URIImpl("urn:custom#type"), "1235"); -// RyaType customType3 = new RyaType(new URIImpl("urn:custom#type"), "1236"); -// RyaTypeRange customTypeRange1 = new RyaTypeRange(customType1, customType2); -// RyaTypeRange customTypeRange2 = new RyaTypeRange(customType2, customType3); -// -// public void testSpo() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, uri, uri, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri, uri, uri2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSpoCustomType() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, customType1, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, uri, customType1, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri, uri, customType2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSpoRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, uri, rangeURI, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri, uri, rangeURI2, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSpoRangeCustomType() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, customType1, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, uri, customTypeRange1, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri, uri, customTypeRange2, null, null); -// 
assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSp() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, uri, null, null, null); -// assertTrue(entry.getValue().contains(key)); -// entry = strategy.defineRange(uri, uri2, null, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSpRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, rangeURI, null, null, null); -// assertTrue(entry.getValue().contains(key)); -// entry = strategy.defineRange(uri, rangeURI2, null, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testS() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(uri, null, null, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(uri2, null, null, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testSRange() throws Exception { -// Map serialize = ryaContext.serializeTriple( -// new RyaStatement(uri, uri, uri, null)); -// TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); -// Key key = new Key(new Text(tripleRow.getRow())); -// -// Map.Entry entry = strategy.defineRange(rangeURI, null, null, null, null); -// assertTrue(entry.getValue().contains(key)); -// -// entry = strategy.defineRange(rangeURI2, null, null, null, null); -// assertFalse(entry.getValue().contains(key)); -// } -// -// public void testHandles() throws Exception { -// //spo(ng) -// assertTrue(strategy.handles(uri, uri, uri, null)); -// assertTrue(strategy.handles(uri, uri, uri, uri)); -// //sp(ng) -// assertTrue(strategy.handles(uri, uri, null, null)); -// assertTrue(strategy.handles(uri, uri, null, uri)); -// //s(ng) -// assertTrue(strategy.handles(uri, null, null, null)); -// assertTrue(strategy.handles(uri, null, null, uri)); -// //sp_r(o)(ng) -// assertTrue(strategy.handles(uri, uri, rangeURI, null)); -// assertTrue(strategy.handles(uri, uri, rangeURI, uri)); -// //s_r(p)(ng) -// assertTrue(strategy.handles(uri, rangeURI, null, null)); -// assertTrue(strategy.handles(uri, rangeURI, null, uri)); -// //r(s) -// assertTrue(strategy.handles(rangeURI, null, null, null)); -// -// //fail -// //s_r(p)_r(o) -// assertFalse(strategy.handles(uri, rangeURI, rangeURI, null)); -// -// //s==null -// assertFalse(strategy.handles(null, uri, uri, null)); -// -// //s_r(o) -// assertFalse(strategy.handles(uri, null, rangeURI, null)); -// } -//} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java deleted file mode 100644 index 4363fd37b..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java +++ /dev/null @@ -1,86 +0,0 @@ -package 
mvm.rya.api.resolver; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Map; - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.query.strategy.AbstractTriplePatternStrategyTest.MockRdfConfiguration; -import mvm.rya.api.query.strategy.wholerow.MockRdfCloudConfiguration; -import mvm.rya.api.resolver.triple.TripleRow; - -import org.openrdf.model.impl.URIImpl; - -/** - */ -public class RyaContextTest extends TestCase { - - public void testDefaultSerialization() throws Exception { - RyaContext instance = RyaContext.getInstance(); - //plain string - RyaType ryaType = new RyaType("mydata"); - byte[] serialize = instance.serialize(ryaType); - assertEquals(ryaType, instance.deserialize(serialize)); - - //uri - RyaURI ryaURI = new RyaURI("urn:test#1234"); - serialize = instance.serialize(ryaURI); - RyaType deserialize = instance.deserialize(serialize); - assertEquals(ryaURI, deserialize); - - //custom type - ryaType = new RyaType(new URIImpl("urn:test#customDataType"), "mydata"); - serialize = instance.serialize(ryaType); - assertEquals(ryaType, instance.deserialize(serialize)); - } - - public void testTripleRowSerialization() throws Exception { - RyaURI subj = new RyaURI("urn:test#subj"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaType obj = new RyaType("mydata"); - RyaStatement statement = new RyaStatement(subj, pred, obj); - RyaTripleContext instance = RyaTripleContext.getInstance(new MockRdfCloudConfiguration()); - - Map map = instance.serializeTriple(statement); - TripleRow tripleRow = map.get(TABLE_LAYOUT.SPO); - assertEquals(statement, instance.deserializeTriple(TABLE_LAYOUT.SPO, tripleRow)); - } - - public void testHashedTripleRowSerialization() throws Exception { - RyaURI subj = new RyaURI("urn:test#subj"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaType obj = new RyaType("mydata"); - RyaStatement statement = new RyaStatement(subj, pred, obj); - MockRdfCloudConfiguration config = new MockRdfCloudConfiguration(); - config.set(MockRdfCloudConfiguration.CONF_PREFIX_ROW_WITH_HASH, Boolean.TRUE.toString()); - RyaTripleContext instance = RyaTripleContext.getInstance(config); - - Map map = instance.serializeTriple(statement); - TripleRow tripleRow = map.get(TABLE_LAYOUT.SPO); - assertEquals(statement, instance.deserializeTriple(TABLE_LAYOUT.SPO, tripleRow)); - } - -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java deleted file mode 100644 index 012641411..000000000 --- 
a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java +++ /dev/null @@ -1,40 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.api.domain.RyaType; -import org.openrdf.model.impl.URIImpl; - -/** - * Date: 7/16/12 - * Time: 2:47 PM - */ -public class CustomDatatypeResolverTest extends TestCase { - - public void testCustomDataTypeSerialization() throws Exception { - RyaType ryaType = new RyaType(new URIImpl("urn:test#datatype"), "testdata"); - byte[] serialize = new CustomDatatypeResolver().serialize(ryaType); - RyaType deserialize = new CustomDatatypeResolver().deserialize(serialize); - assertEquals(ryaType, deserialize); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java deleted file mode 100644 index 7dfa8ea62..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java +++ /dev/null @@ -1,177 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static org.junit.Assert.*; - -import java.util.Date; -import java.util.GregorianCalendar; - -import javax.xml.datatype.DatatypeFactory; -import javax.xml.datatype.XMLGregorianCalendar; - -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaTypeResolverException; - -import org.junit.Ignore; -import org.junit.Test; -import org.openrdf.model.impl.CalendarLiteralImpl; -import org.openrdf.model.vocabulary.XMLSchema; - -/** - * Test serializing and deserializing. - * Notes: - * The serialization, deserialization fills in some information: - * If preserving uncertainty, or preserving the source timezone, then don't use XML type tag. 
- * - uncertainty: missing time hh:mm:ss becomes 00:00:00
- * - uncertainty: missing milliseconds (.123) become .000.
- * - uncertainty: missing timezone becomes the system local timezone.
- * - timezone: converted to the equivalent Z timezone.
- * - a type of XMLSchema.DATE becomes XMLSchema.DATETIME after deserialization
- *
- * ex: run in timezone eastern time (GMT-5:00):
- *     before= 2000-02-02 type = XMLSchema.DATE
- *     deserialized= 2000-02-02T05:00:00.000Z type = XMLSchema.DATETIME
- */
-public class DateTimeRyaTypeResolverTest {
-    @Test
-    public void testDateTime() throws Exception {
-        long currentTime = 1342182689285L;
-        Date date = new Date(currentTime);
-        GregorianCalendar gc = new GregorianCalendar();
-        gc.setTimeInMillis(date.getTime());
-        XMLGregorianCalendar xmlGregorianCalendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(gc);
-        CalendarLiteralImpl literal = new CalendarLiteralImpl(xmlGregorianCalendar);
-        byte[] serialize = new DateTimeRyaTypeResolver().serialize(RdfToRyaConversions.convertLiteral(literal));
-        RyaType deserialize = new DateTimeRyaTypeResolver().deserialize(serialize);
-        assertEquals("2012-07-13T12:31:29.285Z", deserialize.getData());
-        assertEquals(XMLSchema.DATETIME, deserialize.getDataType());
-    }
-    @Test
-    public void testFull() throws Exception {
-        assertSerializeAndDesDateTime("2000-01-01T00:00:01.111Z");
-    }
-    @Test
-    public void testNoMilliSeconds() throws Exception {
-        assertSerializeAndDesDateTime("2000-01-01T00:00:01Z", "2000-01-01T00:00:01.000Z");
-    }
-    @Test
-    public void testDateNoTimeNoZone() throws Exception {
-        String beforeDate = "2000-02-02";
-        String afterDate = "2000-02-0(1|2|3)T\\d\\d:\\d\\d:00\\.000Z";
-        RyaType deserialize = serializeAndDeserialize(beforeDate, XMLSchema.DATE);
-        final String afterActual = deserialize.getData();
-        assertTrue("Before='"+beforeDate+"'; Expected should match actual regex after='"+afterDate+"' deserialized:"+afterActual, afterActual.matches(afterDate));
-        assertEquals(XMLSchema.DATETIME, deserialize.getDataType());
-    }
-    @Test
-    public void testDateZoneNoTime() throws Exception {
-        // If you see this:
-        //   java.lang.IllegalArgumentException: Invalid format: "2000-02-02Z" is malformed at "Z"
-        // use this instead: "2000-02-02TZ";
-        String currentTime = "2000-02-02TZ";
-        RyaType deserialize = serializeAndDeserialize(currentTime, XMLSchema.DATE);
-        assertEquals("Before expected should match after actual deserialized:", "2000-02-02T00:00:00.000Z", deserialize.getData());
-        assertEquals(XMLSchema.DATETIME, deserialize.getDataType());
-    }
-    @Test
-    public void testNoZone() throws Exception {
-        String beforeDate = "2000-01-02T00:00:01";
-        String afterDate = "2000-01-0(1|2|3)T\\d\\d:\\d\\d:01\\.000Z";
-        RyaType deserialize = serializeAndDeserialize(beforeDate, XMLSchema.DATE);
-        final String afterActual = deserialize.getData();
-        assertTrue("Before='"+beforeDate+"'; Expected should match actual regex after='"+afterDate+"' deserialized:"+afterActual, afterActual.matches(afterDate));
-        assertEquals(XMLSchema.DATETIME, deserialize.getDataType());
-    }
-    @Test
-    public void testMilliSecondsNoZone() throws Exception {
-        String beforeDate = "2002-02-02T02:02:02.222";
-        String afterDate = "2002-02-02T\\d\\d:\\d\\d:02\\.222.*";
-        RyaType deserialize = serializeAndDeserialize(beforeDate, XMLSchema.DATETIME);
-        final String afterActual = deserialize.getData();
-        assertTrue("Before='"+beforeDate+"'; Expected should match actual regex after='"+afterDate+"' deserialized:"+afterActual,
afterActual.matches(afterDate)); - assertEquals(XMLSchema.DATETIME, deserialize.getDataType()); - - } - @Test - public void testHistoryAndFuture() throws Exception { - assertSerializeAndDesDateTime("-2000-01-01T00:00:01Z","-2000-01-01T00:00:01.000Z"); - assertSerializeAndDesDateTime("111-01-01T00:00:01Z","0111-01-01T00:00:01.000Z"); - assertSerializeAndDesDateTime("12345-01-01T00:00:01Z","12345-01-01T00:00:01.000Z"); - } - - @Test - public void testTimeZone() throws Exception { - assertSerializeAndDesDateTime( "2000-01-01T00:00:01+01:00", "1999-12-31T23:00:01.000Z"); - assertSerializeAndDesDateTime( "2000-01-01T00:00:01+02:30", "1999-12-31T21:30:01.000Z"); - assertSerializeAndDesDateTime("2000-01-01T00:00:01.123-02:00", "2000-01-01T02:00:01.123Z"); - assertSerializeAndDesDateTime( "111-01-01T00:00:01+14:00", "0110-12-31T10:00:01.000Z" ); - assertSerializeAndDesDateTime( "12345-01-01T00:00:01-14:00","12345-01-01T14:00:01.000Z"); - assertSerializeAndDesDateTime( "1-01-01T00:00:01+14:00", "0000-12-31T10:00:01.000Z" ); - } - - @Test - public void testGarbageIn() throws Exception { - String currentTime = "Blablabla"; - RyaType ryaType = new RyaType(XMLSchema.DATETIME, currentTime ); - Throwable threw=null; - try { - new DateTimeRyaTypeResolver().serialize(ryaType); - } catch (java.lang.IllegalArgumentException exception) { - threw = exception; - } - assertNotNull("Expected to catch bad format message.",threw); - assertEquals("Caught bad format message.","Invalid format: \"Blablabla\"", threw.getMessage()); - } - /** - * Do the test on the DateTime - * @param dateTimeString - * @throws RyaTypeResolverException - */ - private void assertSerializeAndDesDateTime(String dateTimeString) throws RyaTypeResolverException { - assertSerializeAndDesDateTime(dateTimeString, dateTimeString); - } - private void assertSerializeAndDesDateTime(String beforeDate, String afterDate ) throws RyaTypeResolverException { - RyaType deserialize = serializeAndDeserialize(beforeDate, XMLSchema.DATETIME); - assertEquals("Before='"+beforeDate+"'; Expected should match actual after deserialized:",afterDate, deserialize.getData()); - assertEquals(XMLSchema.DATETIME, deserialize.getDataType()); - } - /** - * Serialize a datetime string, then deserialize as a ryaType. - * @param dateTimeString - * @param type if null , use default: XMLSchema.DATETIME - * @return - * @throws RyaTypeResolverException - */ - private RyaType serializeAndDeserialize(String dateTimeString, org.openrdf.model.URI type ) throws RyaTypeResolverException { - if (type == null) - type = XMLSchema.DATETIME; - RyaType ryaType = new RyaType(type, dateTimeString ); - byte[] serialize = new DateTimeRyaTypeResolver().serialize(ryaType); - return new DateTimeRyaTypeResolver().deserialize(serialize); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java deleted file mode 100644 index 2c5f43d59..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java +++ /dev/null @@ -1,46 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
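
The DateTime tests above pin down the normalization contract: whatever zone or precision goes in, a millisecond-precision UTC xsd:dateTime comes back. A small round-trip sketch outside JUnit, using the removed DateTimeRyaTypeResolver API exactly as the tests do; the expected output is the value testTimeZone asserts:

import mvm.rya.api.domain.RyaType;
import mvm.rya.api.resolver.impl.DateTimeRyaTypeResolver;

import org.openrdf.model.vocabulary.XMLSchema;

public class DateTimeRoundTripSketch {
    public static void main(String[] args) throws Exception {
        DateTimeRyaTypeResolver resolver = new DateTimeRyaTypeResolver();
        // A +01:00 source zone goes in; the equivalent instant comes back rendered in Z.
        RyaType in = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01+01:00");
        RyaType out = resolver.deserialize(resolver.serialize(in));
        System.out.println(out.getData()); // 1999-12-31T23:00:01.000Z, per testTimeZone above
    }
}
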
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.api.domain.RyaType; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.util.Random; - -/** - * Date: 7/20/12 - * Time: 9:43 AM - */ -public class DoubleRyaTypeResolverTest extends TestCase { - - public void testDoubleSerialization() throws Exception { - Double d = randomDouble(); - RyaType ryaType = new RyaType(XMLSchema.DOUBLE, d.toString()); - byte[] serialize = new DoubleRyaTypeResolver().serialize(ryaType); - assertEquals(d, Double.parseDouble(new DoubleRyaTypeResolver().deserialize(serialize).getData())); - } - - private double randomDouble() { - return new Random(System.currentTimeMillis()).nextDouble(); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java deleted file mode 100644 index 60cded09f..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java +++ /dev/null @@ -1,44 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
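
The Double test just above and the Integer and Long tests that follow all check one property: parsing deserialize(serialize(x)) recovers x. A hedged generic helper for that property, assuming serialize and deserialize are declared on the RyaTypeResolver interface, as the concrete resolver calls in these tests suggest:

import mvm.rya.api.domain.RyaType;
import mvm.rya.api.resolver.RyaTypeResolver;
import mvm.rya.api.resolver.impl.DoubleRyaTypeResolver;
import mvm.rya.api.resolver.impl.IntegerRyaTypeResolver;

import org.openrdf.model.vocabulary.XMLSchema;

public class NumericRoundTripSketch {
    // serialize() then deserialize(), returning the recovered lexical form.
    static String roundTrip(RyaTypeResolver resolver, RyaType value) throws Exception {
        return resolver.deserialize(resolver.serialize(value)).getData();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(roundTrip(new DoubleRyaTypeResolver(), new RyaType(XMLSchema.DOUBLE, "3.14")));
        System.out.println(roundTrip(new IntegerRyaTypeResolver(), new RyaType(XMLSchema.INTEGER, "42")));
    }
}
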
- */ - - - -import junit.framework.TestCase; -import mvm.rya.api.domain.RyaType; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.util.Random; - -/** - * Date: 7/20/12 - * Time: 10:17 AM - */ -public class IntegerRyaTypeResolverTest extends TestCase { - public void testIntegerSerialization() throws Exception { - Integer i = randomInt(); - byte[] serialize = new IntegerRyaTypeResolver().serialize(new RyaType(XMLSchema.INTEGER, i.toString())); - assertEquals(i, new Integer(new IntegerRyaTypeResolver().deserialize(serialize).getData())); - } - - private int randomInt() { - return new Random(System.currentTimeMillis()).nextInt(Integer.MAX_VALUE); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java deleted file mode 100644 index 3dfb9c4e0..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java +++ /dev/null @@ -1,49 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.domain.RyaType; -import org.junit.Test; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.util.Random; - -import static junit.framework.Assert.assertEquals; - -/** - * Date: 9/7/12 - * Time: 2:53 PM - */ -public class LongRyaTypeResolverTest { - - @Test - public void testSerialization() throws Exception { - Long i = randomLong(); - byte[] serialize = new LongRyaTypeResolver().serialize(new RyaType(XMLSchema.LONG, i.toString())); - assertEquals(i, new Long(new LongRyaTypeResolver().deserialize(serialize).getData())); - } - - private long randomLong() { - return new Random(System.currentTimeMillis()).nextLong(); - } - -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java deleted file mode 100644 index f962b8815..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java +++ /dev/null @@ -1,40 +0,0 @@ -package mvm.rya.api.resolver.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; - -/** - * Date: 7/16/12 - * Time: 2:51 PM - */ -public class RyaURIResolverTest extends TestCase { - - public void testSerialization() throws Exception { - RyaURI ryaURI = new RyaURI("urn:testdata#data"); - byte[] serialize = new RyaURIResolver().serialize(ryaURI); - RyaType deserialize = new RyaURIResolver().deserialize(serialize); - assertEquals(ryaURI, deserialize); - } -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java deleted file mode 100644 index 2baa92de4..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java +++ /dev/null @@ -1,124 +0,0 @@ -package mvm.rya.api.resolver.triple.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
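
One detail worth making explicit from the URI test above: serialize takes a RyaURI, but deserialize returns the broader RyaType, so the assertEquals relies on equality holding across the two classes. A sketch of that round trip on its own:

import mvm.rya.api.domain.RyaType;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.api.resolver.impl.RyaURIResolver;

public class UriRoundTripSketch {
    public static void main(String[] args) throws Exception {
        RyaURIResolver resolver = new RyaURIResolver();
        RyaURI uri = new RyaURI("urn:testdata#data");
        // deserialize() returns a RyaType; equality is by datatype plus lexical form,
        // which is why the test above can compare it against the original RyaURI.
        RyaType back = resolver.deserialize(resolver.serialize(uri));
        System.out.println(back.equals(uri)); // true
    }
}
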
- */ - - - -import java.util.Map; - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.triple.TripleRow; - -/** - * Date: 7/25/12 - * Time: 10:52 AM - */ -public class HashedWholeRowTripleResolverTest extends TestCase { - - WholeRowHashedTripleResolver tripleResolver = new WholeRowHashedTripleResolver(); - - public void testSerialize() throws Exception { - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - RyaStatement deserialize = tripleResolver.deserialize(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - deserialize = tripleResolver.deserialize(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializePO() throws Exception { - RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializeOSP() throws Exception { - RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializeOSPCustomType() throws Exception { - RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - //no context - RyaURI subj = new 
RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java deleted file mode 100644 index 30409ffeb..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java +++ /dev/null @@ -1,127 +0,0 @@ -package mvm.rya.api.resolver.triple.impl; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import junit.framework.TestCase; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowRegex; - -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Date: 7/25/12 - * Time: 10:52 AM - */ -public class WholeRowTripleResolverTest extends TestCase { - - WholeRowTripleResolver tripleResolver = new WholeRowTripleResolver(); - - public void testSerialize() throws Exception { - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - RyaStatement deserialize = tripleResolver.deserialize(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - deserialize = tripleResolver.deserialize(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializePO() throws Exception { - RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO; - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializeOSP() throws Exception { - RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - - public void testSerializeOSPCustomType() throws Exception { - 
RdfCloudTripleStoreConstants.TABLE_LAYOUT po = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - //no context - RyaURI subj = new RyaURI("urn:test#1234"); - RyaURI pred = new RyaURI("urn:test#pred"); - RyaURI obj = new RyaURI("urn:test#obj"); - RyaURI cntxt = new RyaURI("urn:test#cntxt"); - final RyaStatement stmt = new RyaStatement(subj, pred, obj, null, null, null, null, 100l); - final RyaStatement stmtContext = new RyaStatement(subj, pred, obj, cntxt, null, null, null, 100l); - Map serialize = tripleResolver.serialize(stmt); - TripleRow tripleRow = serialize.get(po); - RyaStatement deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmt, deserialize); - - //context - serialize = tripleResolver.serialize(stmtContext); - tripleRow = serialize.get(po); - deserialize = tripleResolver.deserialize(po, tripleRow); - assertEquals(stmtContext, deserialize); - } - -} diff --git a/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java b/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java deleted file mode 100644 index db377e193..000000000 --- a/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
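
Both resolver test classes walk the same serialize/deserialize loop one layout at a time; as a usage sketch, the loop generalizes to all three layouts at once, using only calls that appear in the tests above (the Map generic parameters are restored by assumption):

import java.util.Map;

import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.api.resolver.triple.TripleRow;
import mvm.rya.api.resolver.triple.impl.WholeRowTripleResolver;

public class AllLayoutsRoundTripSketch {
    public static void main(String[] args) throws Exception {
        WholeRowTripleResolver resolver = new WholeRowTripleResolver();
        RyaStatement stmt = new RyaStatement(
                new RyaURI("urn:test#s"), new RyaURI("urn:test#p"), new RyaURI("urn:test#o"));
        Map<TABLE_LAYOUT, TripleRow> rows = resolver.serialize(stmt);
        for (Map.Entry<TABLE_LAYOUT, TripleRow> e : rows.entrySet()) {
            // every layout must reproduce the original statement
            RyaStatement back = resolver.deserialize(e.getKey(), e.getValue());
            System.out.println(e.getKey() + " round-trips: " + back.equals(stmt));
        }
    }
}
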
- */
-
-//package mvm.rya.api.utils;
-
-//
-//import junit.framework.TestCase;
-//import mvm.rya.api.RdfCloudTripleStoreUtils;
-//import org.openrdf.model.Statement;
-//import org.openrdf.model.impl.StatementImpl;
-//
-//import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-//
-///**
-// * Class RdfIOTest
-// * Date: Mar 8, 2012
-// * Time: 10:12:00 PM
-// */
-//public class RdfIOTest extends TestCase {
-//
-//    Statement st = new StatementImpl(RTS_SUBJECT, RTS_VERSION_PREDICATE, VERSION);
-//    int num = 100000;
-//
-//    public void testPerf() throws Exception {
-//
-//        long start = System.currentTimeMillis();
-//        for(int i = 0; i < num; i++) {
-//            byte[] bytes = RdfCloudTripleStoreUtils.writeValue(st.getSubject());
-////            byte[] bytes = RdfIO.writeStatement(st);
-////            Statement retSt = RdfIO.readStatement(ByteStreams.newDataInput(bytes), VALUE_FACTORY);
-//        }
-//        long dur = System.currentTimeMillis() - start;
-//        System.out.println("RdfCloudTripleStoreUtils: " + dur);
-//
-//    }
-//
-//    public void testPerf2() throws Exception {
-//        long start = System.currentTimeMillis();
-//        for(int i = 0; i < num; i++) {
-//            byte[] bytes = RdfIO.writeValue(st.getSubject());
-//
-////            byte[] bytes = RdfCloudTripleStoreUtils.buildRowWith(RdfCloudTripleStoreUtils.writeValue(st.getSubject()),
-////                    RdfCloudTripleStoreUtils.writeValue(st.getPredicate()),
-////                    RdfCloudTripleStoreUtils.writeValue(st.getObject()));
-////            Statement retSt = RdfCloudTripleStoreUtils.translateStatementFromRow(ByteStreams.newDataInput(bytes), TABLE_LAYOUT.SPO, VALUE_FACTORY);
-//        }
-//        long dur = System.currentTimeMillis() - start;
-//        System.out.println("RdfIO: " + dur);
-//    }
-//}
diff --git a/common/rya.provenance/pom.xml b/common/rya.provenance/pom.xml
deleted file mode 100644
index b9f749ce5..000000000
--- a/common/rya.provenance/pom.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements. See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership. The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License. You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied. See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.common</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>rya.provenance</artifactId>
-    <name>Apache Rya Provenance</name>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-runtime</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
deleted file mode 100644
index 2ef279afb..000000000
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package mvm.rya.rdftriplestore.provenance;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
- */ - - - -import org.apache.log4j.Logger; - -/** - * Provenance collector that logs queries - */ -public class LoggingProvenanceCollector implements ProvenanceCollector { - - private static final Logger log = Logger.getLogger(LoggingProvenanceCollector.class); - - /* (non-Javadoc) - * @see mvm.rya.rdftriplestore.provenance.ProvenanceCollector#recordQuery(java.lang.String) - */ - public void recordQuery(String query) { - log.debug("User entered query: " + query); - } - - -} diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java deleted file mode 100644 index a0bd89646..000000000 --- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java +++ /dev/null @@ -1,39 +0,0 @@ -package mvm.rya.rdftriplestore.provenance; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.openrdf.repository.RepositoryException; - -/** - * Exception for errors in collecting provenance data - */ -public class ProvenanceCollectionException extends Exception { - - public ProvenanceCollectionException(RepositoryException e) { - super(e); - } - - /** - * - */ - private static final long serialVersionUID = 1L; - -} diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java deleted file mode 100644 index a4ff829bf..000000000 --- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java +++ /dev/null @@ -1,34 +0,0 @@ -package mvm.rya.rdftriplestore.provenance; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/** - * Collects/records provenance data - */ -public interface ProvenanceCollector { - - /** - * Records appropriate metadata about a query - * @param query the query being recorded. 
Cannot be null.
-     * @throws ProvenanceCollectionException
-     */
-    public void recordQuery(String query) throws ProvenanceCollectionException;
-}
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
deleted file mode 100644
index e283f2c80..000000000
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package mvm.rya.rdftriplestore.provenance;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.List;
-
-import mvm.rya.rdftriplestore.provenance.rdf.BaseProvenanceModel;
-import mvm.rya.rdftriplestore.provenance.rdf.RDFProvenanceModel;
-
-import org.openrdf.model.Statement;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-
-/**
- * Records provenance data to an external RDF triplestore
- */
-public class TriplestoreProvenanceCollector implements ProvenanceCollector {
-
-    private RDFProvenanceModel provenanceModel;
-    private SailRepository provenanceRepo;
-    private String user;
-    private String queryType;
-
-    /**
-     * @param repo the repository to record to
-     * @param user the user issuing the query
-     * @param queryType the type of query (SPARQL, etc.)
-     */
-    public TriplestoreProvenanceCollector(SailRepository repo, String user, String queryType) {
-        provenanceRepo = repo;
-        provenanceModel = new BaseProvenanceModel();
-        this.user = user;
-        this.queryType = queryType;
-    }
-
-    /* (non-Javadoc)
-     * @see mvm.rya.rdftriplestore.provenance.ProvenanceCollector#recordQuery(java.lang.String)
-     */
-    public void recordQuery(String query) throws ProvenanceCollectionException {
-        List<Statement> provenanceTriples = provenanceModel.getStatementsForQuery(query, user, queryType);
-        try {
-            provenanceRepo.getConnection().add(provenanceTriples);
-        } catch (RepositoryException e) {
-            throw new ProvenanceCollectionException(e);
-        }
-    }
-
-}
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
deleted file mode 100644
index b8c5f32ae..000000000
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package mvm.rya.rdftriplestore.provenance.rdf;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.
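
Wiring the collector together mirrors the test further down in this patch: stand up a repository, wrap it, record. A compact usage sketch with an in-memory Sesame store for illustration; every class and constructor used here appears in the removed sources above:

import mvm.rya.rdftriplestore.provenance.TriplestoreProvenanceCollector;

import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class ProvenanceWiringSketch {
    public static void main(String[] args) throws Exception {
        SailRepository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        // every recorded query is annotated with this user and query type
        TriplestoreProvenanceCollector collector =
                new TriplestoreProvenanceCollector(repo, "alice", "SPARQL");
        collector.recordQuery("SELECT ?s WHERE { ?s ?p ?o }");
    }
}
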
The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.vocabulary.RDF;
-
-/**
- * Basic representation of provenance data captured in RDF.
- */
-public class BaseProvenanceModel implements RDFProvenanceModel {
-
-    private static final ValueFactory vf = ValueFactoryImpl.getInstance();
-    private static final Resource queryEventType = vf.createURI("http://rya.com/provenance#QueryEvent");
-    private static final URI atTimeProperty = vf.createURI("http://www.w3.org/ns/prov#atTime");
-    private static final URI associatedWithUser = vf.createURI("http://rya.com/provenance#associatedWithUser");
-    private static final URI queryTypeProp = vf.createURI("http://rya.com/provenance#queryType");
-    private static final URI executedQueryProperty = vf.createURI("http://rya.com/provenance#executedQuery");
-    private static final String queryNameSpace = "http://rya.com/provenance#queryEvent";
-
-    /* (non-Javadoc)
-     * @see mvm.rya.rdftriplestore.provenance.rdf.RDFProvenanceModel#getStatementsForQuery(java.lang.String, java.lang.String, java.lang.String)
-     */
-    public List<Statement> getStatementsForQuery(String query, String user, String queryType) {
-        List<Statement> statements = new ArrayList<Statement>();
-        // create some statements for the query
-        Resource queryEventResource = vf.createURI(queryNameSpace + UUID.randomUUID().toString());
-        Statement queryEventDecl = vf.createStatement(queryEventResource, RDF.TYPE, queryEventType);
-        statements.add(queryEventDecl);
-        Statement queryEventTime = vf.createStatement(queryEventResource, atTimeProperty, vf.createLiteral(new Date()));
-        statements.add(queryEventTime);
-        Statement queryUser = vf.createStatement(queryEventResource, associatedWithUser, vf.createLiteral(user));
-        statements.add(queryUser);
-        Statement executedQuery = vf.createStatement(queryEventResource, executedQueryProperty, vf.createLiteral(query));
-        statements.add(executedQuery);
-        Statement queryTypeStatement = vf.createStatement(queryEventResource, queryTypeProp, vf.createLiteral(queryType));
-        statements.add(queryTypeStatement);
-        return statements;
-    }
-
-}
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
deleted file mode 100644
index c5495f298..000000000
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package mvm.rya.rdftriplestore.provenance.rdf;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.
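
Given the vocabulary defined just above (provenance#QueryEvent, prov#atTime, provenance#associatedWithUser, provenance#executedQuery), recorded events can be read back with an ordinary SPARQL query. A sketch reusing the repository from the previous example; the query text is assembled here from the model's own URIs:

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.sail.SailRepository;

public class ProvenanceReadbackSketch {
    // Dump every recorded query event as "user ran query".
    public static void dump(SailRepository repo) throws Exception {
        String sparql =
            "SELECT ?user ?query WHERE { " +
            "  ?event a <http://rya.com/provenance#QueryEvent> ; " +
            "         <http://rya.com/provenance#associatedWithUser> ?user ; " +
            "         <http://rya.com/provenance#executedQuery> ?query . }";
        TupleQuery q = repo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        TupleQueryResult result = q.evaluate();
        while (result.hasNext()) {
            BindingSet bs = result.next();
            System.out.println(bs.getValue("user") + " ran " + bs.getValue("query"));
        }
    }
}
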
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
deleted file mode 100644
index c5495f298..000000000
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package mvm.rya.rdftriplestore.provenance.rdf;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.util.List;
-
-import org.openrdf.model.Statement;
-
-
-public interface RDFProvenanceModel {
-
-    List<Statement> getStatementsForQuery(String query, String user, String queryType);
-
-
-}
diff --git a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
deleted file mode 100644
index c4314685d..000000000
--- a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package mvm.rya.rdftriplestore.provenance;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import static org.junit.Assert.assertTrue;
-
-import org.junit.Test;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResult;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.memory.MemoryStore;
-
-public class TriplestoreProvenanceCollectorTest {
-
-    @Test
-    public void testCollect() throws ProvenanceCollectionException, RepositoryException, MalformedQueryException, QueryEvaluationException {
-        Sail ms = new MemoryStore();
-        SailRepository repo = new SailRepository(ms);
-        repo.initialize();
-        TriplestoreProvenanceCollector coll = new TriplestoreProvenanceCollector(repo, "fakeUser", "SPARQL");
-        coll.recordQuery("fakeQuery");
-        String queryString = "SELECT ?x ?y WHERE { ?x ?p ?y } ";
-        TupleQuery tupleQuery = repo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        TupleQueryResult result = tupleQuery.evaluate();
-        // TODO not asserting on the results.
-        assertTrue(result.hasNext());
-    }
-}
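The TODO in the deleted test above leaves the bindings unchecked. A possible completion, assuming the variable names from the test's query string; this sketch is illustrative only and not part of the patch:

    // Hypothetical completion of the TODO: scan the bindings for the
    // executedQuery literal that recordQuery("fakeQuery") should have stored.
    boolean foundQueryText = false;
    while (result.hasNext()) {
        org.openrdf.query.BindingSet bs = result.next();
        if (bs.getValue("y") != null
                && "fakeQuery".equals(bs.getValue("y").stringValue())) {
            foundQueryText = true;
        }
    }
    assertTrue(foundQueryText);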
diff --git a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
deleted file mode 100644
index 99875e2b7..000000000
--- a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package mvm.rya.rdftriplestore.provenance.rdf;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import static org.junit.Assert.assertTrue;
-
-import java.util.List;
-
-import org.junit.Test;
-import org.openrdf.model.Statement;
-
-public class BaseProvenanceModelTest {
-
-    @Test
-    public void testCreateTriples() {
-        BaseProvenanceModel model = new BaseProvenanceModel();
-        List<Statement> statements = model.getStatementsForQuery("SELECT ?query where { ?query rdf:type <http://rya.com/provenance#QueryEvent> . }", "fakeuser", "SPARQL");
-        assertTrue(!statements.isEmpty());
-    }
-}
diff --git a/dao/accumulo.rya/pom.xml b/dao/accumulo.rya/pom.xml
deleted file mode 100644
index 5328945e9..000000000
--- a/dao/accumulo.rya/pom.xml
+++ /dev/null
@@ -1,93 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.dao</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>accumulo.rya</artifactId>
-    <name>Apache Rya Accumulo DAO</name>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.rya</groupId>
-            <artifactId>rya.api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.accumulo</groupId>
-            <artifactId>accumulo-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-ntriples</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-nquads</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryalgebra-evaluation</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-trig</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <profiles>
-        <profile>
-            <id>mr</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-shade-plugin</artifactId>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-</project>
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
deleted file mode 100644
index ebca6a2d9..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
+++ /dev/null
@@ -1,99 +0,0 @@
-package mvm.rya.accumulo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.base.Preconditions; -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.persist.RdfDAOException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.openrdf.model.Namespace; -import org.openrdf.model.impl.NamespaceImpl; - -import java.io.IOError; -import java.util.Iterator; -import java.util.Map.Entry; - -public class AccumuloNamespaceTableIterator implements - CloseableIteration { - - private boolean open = false; - private Iterator> result; - - public AccumuloNamespaceTableIterator(Iterator> result) throws RdfDAOException { - Preconditions.checkNotNull(result); - open = true; - this.result = result; - } - - @Override - public void close() throws RdfDAOException { - try { - verifyIsOpen(); - open = false; - } catch (IOError e) { - throw new RdfDAOException(e); - } - } - - public void verifyIsOpen() throws RdfDAOException { - if (!open) { - throw new RdfDAOException("Iterator not open"); - } - } - - @Override - public boolean hasNext() throws RdfDAOException { - verifyIsOpen(); - return result != null && result.hasNext(); - } - - @Override - public Namespace next() throws RdfDAOException { - if (hasNext()) { - return getNamespace(result); - } - return null; - } - - public static Namespace getNamespace(Iterator> rowResults) { - for (; rowResults.hasNext(); ) { - Entry next = rowResults.next(); - Key key = next.getKey(); - Value val = next.getValue(); - String cf = key.getColumnFamily().toString(); - String cq = key.getColumnQualifier().toString(); - return new NamespaceImpl(key.getRow().toString(), new String( - val.get())); - } - return null; - } - - @Override - public void remove() throws RdfDAOException { - next(); - } - - public boolean isOpen() { - return open; - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java deleted file mode 100644 index 147228b46..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java +++ /dev/null @@ -1,86 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.List; - -import mvm.rya.accumulo.experimental.AccumuloIndexer; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; - -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configuration; - -import com.google.common.collect.Lists; - -/** - * Created by IntelliJ IDEA. - * Date: 4/25/12 - * Time: 3:24 PM - * To change this template use File | Settings | File Templates. - */ -public class AccumuloRdfConfiguration extends RdfCloudTripleStoreConfiguration { - - public static final String MAXRANGES_SCANNER = "ac.query.maxranges"; - - public static final String CONF_ADDITIONAL_INDEXERS = "ac.additional.indexers"; - - public AccumuloRdfConfiguration() { - super(); - } - - public AccumuloRdfConfiguration(Configuration other) { - super(other); - } - - @Override - public AccumuloRdfConfiguration clone() { - return new AccumuloRdfConfiguration(this); - } - - public Authorizations getAuthorizations() { - String[] auths = getAuths(); - if (auths == null || auths.length == 0) - return AccumuloRdfConstants.ALL_AUTHORIZATIONS; - return new Authorizations(auths); - } - - public void setMaxRangesForScanner(Integer max) { - setInt(MAXRANGES_SCANNER, max); - } - - public Integer getMaxRangesForScanner() { - return getInt(MAXRANGES_SCANNER, 2); - } - - public void setAdditionalIndexers(Class... indexers) { - List strs = Lists.newArrayList(); - for (Class ai : indexers){ - strs.add(ai.getName()); - } - - setStrings(CONF_ADDITIONAL_INDEXERS, strs.toArray(new String[]{})); - } - - public List getAdditionalIndexers() { - return getInstances(CONF_ADDITIONAL_INDEXERS, AccumuloIndexer.class); - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java deleted file mode 100644 index 1ec57a787..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java +++ /dev/null @@ -1,40 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.ColumnVisibility; - -/** - * Interface AccumuloRdfConstants - * Date: Mar 1, 2012 - * Time: 7:24:52 PM - */ -public interface AccumuloRdfConstants { - public static final Authorizations ALL_AUTHORIZATIONS = Constants.NO_AUTHS; - - public static final Value EMPTY_VALUE = new Value(new byte[0]); - - public static final ColumnVisibility EMPTY_CV = new ColumnVisibility(new byte[0]); -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java deleted file mode 100644 index a3e067761..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java +++ /dev/null @@ -1,173 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static com.google.common.base.Preconditions.checkNotNull; -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM; -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.PRED_CF_TXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.SUBJECT_CF_TXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.SUBJECTPRED_CF_TXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.PREDOBJECT_CF_TXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.SUBJECTOBJECT_CF_TXT; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreStatement; -import mvm.rya.api.layout.TableLayoutStrategy; -import mvm.rya.api.persist.RdfDAOException; -import mvm.rya.api.persist.RdfEvalStatsDAO; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Resource; -import org.openrdf.model.Value; - -/** - * Class CloudbaseRdfEvalStatsDAO - * Date: Feb 28, 2012 - * Time: 5:03:16 PM - */ -public class AccumuloRdfEvalStatsDAO implements RdfEvalStatsDAO { - - private boolean initialized = false; - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - private Collection statements = new ArrayList(); - private Connector connector; - - // private String evalTable = TBL_EVAL; - private TableLayoutStrategy tableLayoutStrategy; - - @Override - 
public void init() throws RdfDAOException { - try { - if (isInitialized()) { - throw new IllegalStateException("Already initialized"); - } - checkNotNull(connector); - tableLayoutStrategy = conf.getTableLayoutStrategy(); -// evalTable = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable); -// conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable); - - TableOperations tos = connector.tableOperations(); - AccumuloRdfUtils.createTableIfNotExist(tos, tableLayoutStrategy.getEval()); -// boolean tableExists = tos.exists(evalTable); -// if (!tableExists) -// tos.create(evalTable); - initialized = true; - } catch (Exception e) { - throw new RdfDAOException(e); - } - } - - - @Override - public void destroy() throws RdfDAOException { - if (!isInitialized()) { - throw new IllegalStateException("Not initialized"); - } - initialized = false; - } - - @Override - public boolean isInitialized() throws RdfDAOException { - return initialized; - } - - public Connector getConnector() { - return connector; - } - - public void setConnector(Connector connector) { - this.connector = connector; - } - - public AccumuloRdfConfiguration getConf() { - return conf; - } - - public void setConf(AccumuloRdfConfiguration conf) { - this.conf = conf; - } - - @Override - public double getCardinality(AccumuloRdfConfiguration conf, - mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card, - List val, Resource context) throws RdfDAOException { - try { - Authorizations authorizations = conf.getAuthorizations(); - Scanner scanner = connector.createScanner(tableLayoutStrategy.getEval(), authorizations); - Text cfTxt = null; - if (CARDINALITY_OF.SUBJECT.equals(card)) { - cfTxt = SUBJECT_CF_TXT; - } else if (CARDINALITY_OF.PREDICATE.equals(card)) { - cfTxt = PRED_CF_TXT; - } else if (CARDINALITY_OF.OBJECT.equals(card)) { -// cfTxt = OBJ_CF_TXT; //TODO: How do we do object cardinality - return Double.MAX_VALUE; - } else if (CARDINALITY_OF.SUBJECTOBJECT.equals(card)) { - cfTxt = SUBJECTOBJECT_CF_TXT; - } else if (CARDINALITY_OF.SUBJECTPREDICATE.equals(card)) { - cfTxt = SUBJECTPRED_CF_TXT; - } else if (CARDINALITY_OF.PREDICATEOBJECT.equals(card)) { - cfTxt = PREDOBJECT_CF_TXT; - } else throw new IllegalArgumentException("Not right Cardinality[" + card + "]"); - Text cq = EMPTY_TEXT; - if (context != null) { - cq = new Text(context.stringValue().getBytes()); - } - scanner.fetchColumn(cfTxt, cq); - Iterator vals = val.iterator(); - String compositeIndex = vals.next().stringValue(); - while (vals.hasNext()){ - compositeIndex += DELIM + vals.next().stringValue(); - } - scanner.setRange(new Range(new Text(compositeIndex.getBytes()))); - Iterator> iter = scanner.iterator(); - if (iter.hasNext()) { - return Double.parseDouble(new String(iter.next().getValue().get())); - } - } catch (Exception e) { - throw new RdfDAOException(e); - } - - //default - return -1; - } - - @Override - public double getCardinality(AccumuloRdfConfiguration conf, - mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card, - List val) throws RdfDAOException { - return getCardinality(conf, card, val, null); - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java deleted file mode 100644 index d13f50e11..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor 
license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.rya.accumulo; - -// -//import com.google.common.collect.Iterators; -//import com.google.common.io.ByteArrayDataInput; -//import com.google.common.io.ByteStreams; -//import info.aduna.iteration.CloseableIteration; -//import mvm.rya.api.RdfCloudTripleStoreConstants; -//import mvm.rya.api.RdfCloudTripleStoreUtils; -//import mvm.rya.api.persist.RdfDAOException; -//import mvm.rya.api.utils.NullableStatementImpl; -//import org.apache.accumulo.core.client.*; -//import org.apache.accumulo.core.data.Key; -//import org.apache.accumulo.core.data.Range; -//import org.apache.accumulo.core.iterators.user.AgeOffFilter; -//import org.apache.accumulo.core.iterators.user.TimestampFilter; -//import org.apache.accumulo.core.security.Authorizations; -//import org.apache.hadoop.io.Text; -//import org.openrdf.model.Resource; -//import org.openrdf.model.Statement; -//import org.openrdf.model.URI; -//import org.openrdf.model.Value; -//import org.openrdf.query.BindingSet; -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; -// -//import java.io.IOException; -//import java.util.Collection; -//import java.util.Collections; -//import java.util.HashSet; -//import java.util.Iterator; -//import java.util.Map.Entry; -// -//import static mvm.rya.accumulo.AccumuloRdfConstants.ALL_AUTHORIZATIONS; -//import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -//import static mvm.rya.api.RdfCloudTripleStoreUtils.writeValue; -// -//public class AccumuloRdfQueryIterator implements -// CloseableIteration, RdfDAOException> { -// -// protected final Logger logger = LoggerFactory.getLogger(getClass()); -// -// private boolean open = false; -// private Iterator result; -// private Resource[] contexts; -// private Collection> statements; -// private int numOfThreads = 20; -// -// private RangeBindingSetEntries rangeMap = new RangeBindingSetEntries(); -// private ScannerBase scanner; -// private boolean isBatchScanner = true; -// private Statement statement; -// Iterator iter_bss = null; -// -// private boolean hasNext = true; -// private AccumuloRdfConfiguration conf; -// private TABLE_LAYOUT tableLayout; -// private Text context_txt; -// -// private DefineTripleQueryRangeFactory queryRangeFactory = new DefineTripleQueryRangeFactory(); -// -// public AccumuloRdfQueryIterator(Collection> statements, Connector connector, Resource... contexts) -// throws RdfDAOException { -// this(statements, connector, null, contexts); -// } -// -// public AccumuloRdfQueryIterator(Collection> statements, Connector connector, -// AccumuloRdfConfiguration conf, Resource... 
contexts) -// throws RdfDAOException { -// this.statements = statements; -// this.contexts = contexts; -// this.conf = conf; -// initialize(connector); -// open = true; -// } -// -// public AccumuloRdfQueryIterator(Resource subject, URI predicate, Value object, Connector connector, -// AccumuloRdfConfiguration conf, Resource[] contexts) throws RdfDAOException { -// this(Collections.>singleton(new RdfCloudTripleStoreUtils.CustomEntry( -// new NullableStatementImpl(subject, predicate, object, contexts), -// null)), connector, conf, contexts); -// } -// -// protected void initialize(Connector connector) -// throws RdfDAOException { -// try { -// //TODO: We cannot span multiple tables here -// Collection ranges = new HashSet(); -// -// result = Iterators.emptyIterator(); -// Long startTime = conf.getStartTime(); -// Long ttl = conf.getTtl(); -// -// Resource context = null; -// for (Entry stmtbs : statements) { -// Statement stmt = stmtbs.getKey(); -// Resource subject = stmt.getSubject(); -// URI predicate = stmt.getPredicate(); -// Value object = stmt.getObject(); -// context = stmt.getContext(); //TODO: assumes the same context for all statements -// logger.debug("Batch Scan, lookup subject[" + subject + "] predicate[" + predicate + "] object[" + object + "] combination"); -// -// Entry entry = queryRangeFactory.defineRange(subject, predicate, object, conf); -// tableLayout = entry.getKey(); -//// isTimeRange = isTimeRange || queryRangeFactory.isTimeRange(); -// Range range = entry.getValue(); -// ranges.add(range); -// rangeMap.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry(range, stmtbs.getValue())); -// } -// -// Authorizations authorizations = AccumuloRdfConstants.ALL_AUTHORIZATIONS; -// String auth = conf.getAuth(); -// if (auth != null) { -// authorizations = new Authorizations(auth.split(",")); -// } -// String table = RdfCloudTripleStoreUtils.layoutToTable(tableLayout, conf); -// result = createScanner(connector, authorizations, table, context, startTime, ttl, ranges); -//// if (isBatchScanner) { -//// ((BatchScanner) scanner).setRanges(ranges); -//// } else { -//// for (Range range : ranges) { -//// ((Scanner) scanner).setRange(range); //TODO: Not good way of doing this -//// } -//// } -//// -//// if (isBatchScanner) { -//// result = ((BatchScanner) scanner).iterator(); -//// } else { -//// result = ((Scanner) scanner).iterator(); -//// } -// } catch (Exception e) { -// throw new RdfDAOException(e); -// } -// } -// -// protected Iterator> createScanner(Connector connector, Authorizations authorizations, String table, Resource context, Long startTime, Long ttl, Collection ranges) throws TableNotFoundException, IOException { -//// ShardedConnector shardedConnector = new ShardedConnector(connector, 4, ta) -// if (rangeMap.ranges.size() > (numOfThreads / 2)) { //TODO: Arbitrary number, make configurable -// BatchScanner scannerBase = connector.createBatchScanner(table, authorizations, numOfThreads); -// scannerBase.setRanges(ranges); -// populateScanner(context, startTime, ttl, scannerBase); -// return scannerBase.iterator(); -// } else { -// isBatchScanner = false; -// Iterator>[] iters = new Iterator[ranges.size()]; -// int i = 0; -// for (Range range : ranges) { -// Scanner scannerBase = connector.createScanner(table, authorizations); -// populateScanner(context, startTime, ttl, scannerBase); -// scannerBase.setRange(range); -// iters[i] = scannerBase.iterator(); -// i++; -// scanner = scannerBase; //TODO: Always overridden, but doesn't matter since Scanner doesn't need to be 
closed -// } -// return Iterators.concat(iters); -// } -// -// } -// -// protected void populateScanner(Resource context, Long startTime, Long ttl, ScannerBase scannerBase) throws IOException { -// if (context != null) { //default graph -// context_txt = new Text(writeValue(context)); -// scannerBase.fetchColumnFamily(context_txt); -// } -// -//// if (!isQueryTimeBased(conf)) { -// if (startTime != null && ttl != null) { -//// scannerBase.setScanIterators(1, FilteringIterator.class.getName(), "filteringIterator"); -//// scannerBase.setScanIteratorOption("filteringIterator", "0", TimeRangeFilter.class.getName()); -//// scannerBase.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.TIME_RANGE_PROP, ttl); -//// scannerBase.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.START_TIME_PROP, startTime); -// IteratorSetting setting = new IteratorSetting(1, "fi", TimestampFilter.class.getName()); -// TimestampFilter.setStart(setting, startTime, true); -// TimestampFilter.setEnd(setting, startTime + ttl, true); -// scannerBase.addScanIterator(setting); -// } else if (ttl != null) { -//// scannerBase.setScanIterators(1, FilteringIterator.class.getName(), "filteringIterator"); -//// scannerBase.setScanIteratorOption("filteringIterator", "0", AgeOffFilter.class.getName()); -//// scannerBase.setScanIteratorOption("filteringIterator", "0.ttl", ttl); -// IteratorSetting setting = new IteratorSetting(1, "fi", AgeOffFilter.class.getName()); -// AgeOffFilter.setTTL(setting, ttl); -// scannerBase.addScanIterator(setting); -// } -//// } -// } -// -// @Override -// public void close() throws RdfDAOException { -// if (!open) -// return; -// verifyIsOpen(); -// open = false; -// if (scanner != null && isBatchScanner) { -// ((BatchScanner) scanner).close(); -// } -// } -// -// public void verifyIsOpen() throws RdfDAOException { -// if (!open) { -// throw new RdfDAOException("Iterator not open"); -// } -// } -// -// @Override -// public boolean hasNext() throws RdfDAOException { -// try { -// if (!open) -// return false; -// verifyIsOpen(); -// /** -// * For some reason, the result.hasNext returns false -// * once at the end of the iterator, and then true -// * for every subsequent call. -// */ -// hasNext = (hasNext && result.hasNext()); -// return hasNext || ((iter_bss != null) && iter_bss.hasNext()); -// } catch (Exception e) { -// throw new RdfDAOException(e); -// } -// } -// -// @Override -// public Entry next() throws RdfDAOException { -// try { -// if (!this.hasNext()) -// return null; -// -// return getStatement(result, contexts); -// } catch (Exception e) { -// throw new RdfDAOException(e); -// } -// } -// -// public Entry getStatement( -// Iterator> rowResults, -// Resource... 
filterContexts) throws IOException { -// try { -// while (true) { -// if (iter_bss != null && iter_bss.hasNext()) { -// return new RdfCloudTripleStoreUtils.CustomEntry(statement, iter_bss.next()); -// } -// -// if (rowResults.hasNext()) { -// Entry entry = rowResults.next(); -// Key key = entry.getKey(); -// ByteArrayDataInput input = ByteStreams.newDataInput(key.getRow().getBytes()); -// statement = RdfCloudTripleStoreUtils.translateStatementFromRow(input, key.getColumnFamily(), tableLayout, RdfCloudTripleStoreConstants.VALUE_FACTORY); -// iter_bss = rangeMap.containsKey(key).iterator(); -// } else -// break; -// } -// } catch (Exception e) { -// throw new IOException(e); -// } -// return null; -// } -// -// @Override -// public void remove() throws RdfDAOException { -// next(); -// } -// -// public int getNumOfThreads() { -// return numOfThreads; -// } -// -// public void setNumOfThreads(int numOfThreads) { -// this.numOfThreads = numOfThreads; -// } -// -// public DefineTripleQueryRangeFactory getQueryRangeFactory() { -// return queryRangeFactory; -// } -// -// public void setQueryRangeFactory(DefineTripleQueryRangeFactory queryRangeFactory) { -// this.queryRangeFactory = queryRangeFactory; -// } -//} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java deleted file mode 100644 index 157fc5a05..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java +++ /dev/null @@ -1,72 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.resolver.triple.TripleRow; -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES; - -/** - * Class AccumuloRdfUtils - * Date: Mar 1, 2012 - * Time: 7:15:54 PM - */ -public class AccumuloRdfUtils { - private static final Log logger = LogFactory.getLog(AccumuloRdfUtils.class); - - public static void createTableIfNotExist(TableOperations tableOperations, String tableName) throws AccumuloException, AccumuloSecurityException, TableExistsException { - boolean tableExists = tableOperations.exists(tableName); - if (!tableExists) { - logger.debug("Creating accumulo table: " + tableName); - tableOperations.create(tableName); - } - } - - public static Key from(TripleRow tripleRow) { - return new Key(defaultTo(tripleRow.getRow(), EMPTY_BYTES), - defaultTo(tripleRow.getColumnFamily(), EMPTY_BYTES), - defaultTo(tripleRow.getColumnQualifier(), EMPTY_BYTES), - defaultTo(tripleRow.getColumnVisibility(), EMPTY_BYTES), - defaultTo(tripleRow.getTimestamp(), Long.MAX_VALUE)); - } - - public static Value extractValue(TripleRow tripleRow) { - return new Value(defaultTo(tripleRow.getValue(), EMPTY_BYTES)); - } - - private static byte[] defaultTo(byte[] bytes, byte[] def) { - return bytes != null ? bytes : def; - } - - private static Long defaultTo(Long l, Long def) { - return l != null ? l : def; - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java deleted file mode 100644 index 764ca8006..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java +++ /dev/null @@ -1,523 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static com.google.common.base.Preconditions.checkNotNull; -import static mvm.rya.accumulo.AccumuloRdfConstants.ALL_AUTHORIZATIONS; -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.INFO_NAMESPACE_TXT; -import static mvm.rya.api.RdfCloudTripleStoreConstants.MAX_MEMORY; -import static mvm.rya.api.RdfCloudTripleStoreConstants.MAX_TIME; -import static mvm.rya.api.RdfCloudTripleStoreConstants.NUM_THREADS; -import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_SUBJECT_RYA; -import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_VERSION_PREDICATE_RYA; -import static mvm.rya.api.RdfCloudTripleStoreConstants.VERSION_RYA; -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.accumulo.experimental.AccumuloIndexer; -import mvm.rya.accumulo.query.AccumuloRyaQueryEngine; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.layout.TableLayoutStrategy; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.RyaNamespaceManager; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchDeleter; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.MultiTableBatchWriter; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Namespace; - -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; - -/** - * Class AccumuloRyaDAO - * Date: Feb 29, 2012 - * Time: 12:37:22 PM - */ -public class AccumuloRyaDAO implements RyaDAO, RyaNamespaceManager { - private static final Log logger = LogFactory.getLog(AccumuloRyaDAO.class); - - private boolean initialized = false; - private Connector connector; - private BatchWriterConfig batchWriterConfig; - - private MultiTableBatchWriter mt_bw; - - // Do not flush these individually - private BatchWriter bw_spo; - private BatchWriter bw_po; - private BatchWriter bw_osp; - - private BatchWriter bw_ns; - - private List secondaryIndexers; - - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - private RyaTableMutationsFactory 
ryaTableMutationsFactory; - private TableLayoutStrategy tableLayoutStrategy; - private AccumuloRyaQueryEngine queryEngine; - private RyaTripleContext ryaContext; - - @Override - public boolean isInitialized() throws RyaDAOException { - return initialized; - } - - @Override - public void init() throws RyaDAOException { - if (initialized) - return; - try { - checkNotNull(conf); - checkNotNull(connector); - - if(batchWriterConfig == null){ - batchWriterConfig = new BatchWriterConfig(); - batchWriterConfig.setMaxMemory(MAX_MEMORY); - batchWriterConfig.setTimeout(MAX_TIME, TimeUnit.MILLISECONDS); - batchWriterConfig.setMaxWriteThreads(NUM_THREADS); - } - - tableLayoutStrategy = conf.getTableLayoutStrategy(); - ryaContext = RyaTripleContext.getInstance(conf); - ryaTableMutationsFactory = new RyaTableMutationsFactory(ryaContext); - - secondaryIndexers = conf.getAdditionalIndexers(); - - TableOperations tableOperations = connector.tableOperations(); - AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getSpo()); - AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getPo()); - AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getOsp()); - AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getNs()); - - for (AccumuloIndexer index : secondaryIndexers) { - index.setConf(conf); - } - - mt_bw = connector.createMultiTableBatchWriter(batchWriterConfig); - - //get the batch writers for tables - bw_spo = mt_bw.getBatchWriter(tableLayoutStrategy.getSpo()); - bw_po = mt_bw.getBatchWriter(tableLayoutStrategy.getPo()); - bw_osp = mt_bw.getBatchWriter(tableLayoutStrategy.getOsp()); - - bw_ns = connector.createBatchWriter(tableLayoutStrategy.getNs(), MAX_MEMORY, - MAX_TIME, 1); - - for (AccumuloIndexer index : secondaryIndexers) { - index.setMultiTableBatchWriter(mt_bw); - } - - queryEngine = new AccumuloRyaQueryEngine(connector, conf); - - checkVersion(); - - initialized = true; - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - public String getVersion() throws RyaDAOException { - String version = null; - CloseableIteration versIter = queryEngine.query(new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, null), conf); - if (versIter.hasNext()) { - version = versIter.next().getObject().getData(); - } - versIter.close(); - - return version; - } - - @Override - public void add(RyaStatement statement) throws RyaDAOException { - commit(Iterators.singletonIterator(statement)); - } - - @Override - public void add(Iterator iter) throws RyaDAOException { - commit(iter); - } - - @Override - public void delete(RyaStatement stmt, AccumuloRdfConfiguration aconf) throws RyaDAOException { - this.delete(Iterators.singletonIterator(stmt), aconf); - //TODO currently all indexers do not support delete - } - - @Override - public void delete(Iterator statements, AccumuloRdfConfiguration conf) throws RyaDAOException { - try { - while (statements.hasNext()) { - RyaStatement stmt = statements.next(); - //query first - CloseableIteration query = this.queryEngine.query(stmt, conf); - while (query.hasNext()) { - deleteSingleRyaStatement(query.next()); - } - } - mt_bw.flush(); - //TODO currently all indexers do not support delete - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @Override - public void dropGraph(AccumuloRdfConfiguration conf, RyaURI... 
graphs) throws RyaDAOException { - BatchDeleter bd_spo = null; - BatchDeleter bd_po = null; - BatchDeleter bd_osp = null; - - try { - bd_spo = createBatchDeleter(tableLayoutStrategy.getSpo(), conf.getAuthorizations()); - bd_po = createBatchDeleter(tableLayoutStrategy.getPo(), conf.getAuthorizations()); - bd_osp = createBatchDeleter(tableLayoutStrategy.getOsp(), conf.getAuthorizations()); - - bd_spo.setRanges(Collections.singleton(new Range())); - bd_po.setRanges(Collections.singleton(new Range())); - bd_osp.setRanges(Collections.singleton(new Range())); - - for (RyaURI graph : graphs){ - bd_spo.fetchColumnFamily(new Text(graph.getData())); - bd_po.fetchColumnFamily(new Text(graph.getData())); - bd_osp.fetchColumnFamily(new Text(graph.getData())); - } - - bd_spo.delete(); - bd_po.delete(); - bd_osp.delete(); - - //TODO indexers do not support delete-UnsupportedOperation Exception will be thrown -// for (AccumuloIndex index : secondaryIndexers) { -// index.dropGraph(graphs); -// } - - } catch (Exception e) { - throw new RyaDAOException(e); - } finally { - if (bd_spo != null) bd_spo.close(); - if (bd_po != null) bd_po.close(); - if (bd_osp != null) bd_osp.close(); - } - - } - - protected void deleteSingleRyaStatement(RyaStatement stmt) throws TripleRowResolverException, MutationsRejectedException { - Map map = ryaContext.serializeTriple(stmt); - bw_spo.addMutation(deleteMutation(map.get(TABLE_LAYOUT.SPO))); - bw_po.addMutation(deleteMutation(map.get(TABLE_LAYOUT.PO))); - bw_osp.addMutation(deleteMutation(map.get(TABLE_LAYOUT.OSP))); - - } - - protected Mutation deleteMutation(TripleRow tripleRow) { - Mutation m = new Mutation(new Text(tripleRow.getRow())); - - byte[] columnFamily = tripleRow.getColumnFamily(); - Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily); - - byte[] columnQualifier = tripleRow.getColumnQualifier(); - Text cqText = columnQualifier == null ? 
EMPTY_TEXT : new Text(columnQualifier); - - m.putDelete(cfText, cqText, new ColumnVisibility(tripleRow.getColumnVisibility()), - tripleRow.getTimestamp()); - return m; - } - - protected void commit(Iterator commitStatements) throws RyaDAOException { - try { - //TODO: Should have a lock here in case we are adding and committing at the same time - while (commitStatements.hasNext()) { - RyaStatement stmt = commitStatements.next(); - - Map> mutationMap = ryaTableMutationsFactory.serialize(stmt); - Collection spo = mutationMap.get(TABLE_LAYOUT.SPO); - Collection po = mutationMap.get(TABLE_LAYOUT.PO); - Collection osp = mutationMap.get(TABLE_LAYOUT.OSP); - bw_spo.addMutations(spo); - bw_po.addMutations(po); - bw_osp.addMutations(osp); - - for (AccumuloIndexer index : secondaryIndexers) { - index.storeStatement(stmt); - } - } - - mt_bw.flush(); - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @Override - public void destroy() throws RyaDAOException { - if (!initialized) { - return; - } - //TODO: write lock - try { - initialized = false; - mt_bw.flush(); - bw_ns.flush(); - - mt_bw.close(); - bw_ns.close(); - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @Override - public void addNamespace(String pfx, String namespace) throws RyaDAOException { - try { - Mutation m = new Mutation(new Text(pfx)); - m.put(INFO_NAMESPACE_TXT, EMPTY_TEXT, new Value(namespace.getBytes())); - bw_ns.addMutation(m); - bw_ns.flush(); - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @Override - public String getNamespace(String pfx) throws RyaDAOException { - try { - Scanner scanner = connector.createScanner(tableLayoutStrategy.getNs(), - ALL_AUTHORIZATIONS); - scanner.fetchColumn(INFO_NAMESPACE_TXT, EMPTY_TEXT); - scanner.setRange(new Range(new Text(pfx))); - Iterator> iterator = scanner - .iterator(); - - if (iterator.hasNext()) { - return new String(iterator.next().getValue().get()); - } - } catch (Exception e) { - throw new RyaDAOException(e); - } - return null; - } - - @Override - public void removeNamespace(String pfx) throws RyaDAOException { - try { - Mutation del = new Mutation(new Text(pfx)); - del.putDelete(INFO_NAMESPACE_TXT, EMPTY_TEXT); - bw_ns.addMutation(del); - bw_ns.flush(); - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Override - public CloseableIteration iterateNamespace() throws RyaDAOException { - try { - Scanner scanner = connector.createScanner(tableLayoutStrategy.getNs(), - ALL_AUTHORIZATIONS); - scanner.fetchColumnFamily(INFO_NAMESPACE_TXT); - Iterator> result = scanner.iterator(); - return new AccumuloNamespaceTableIterator(result); - } catch (Exception e) { - throw new RyaDAOException(e); - } - } - - @Override - public RyaNamespaceManager getNamespaceManager() { - return this; - } - - @Override - public void purge(RdfCloudTripleStoreConfiguration configuration) { - for (String tableName : getTables()) { - try { - purge(tableName, configuration.getAuths()); - compact(tableName); - } catch (TableNotFoundException e) { - logger.error(e.getMessage()); - } catch (MutationsRejectedException e) { - logger.error(e.getMessage()); - } - } - } - - @Override - public void dropAndDestroy() throws RyaDAOException { - for (String tableName : getTables()) { - try { - drop(tableName); - } catch (AccumuloSecurityException e) { - logger.error(e.getMessage()); - throw new RyaDAOException(e); - } catch (AccumuloException e) { - logger.error(e.getMessage()); - throw new 
RyaDAOException(e); - } catch (TableNotFoundException e) { - logger.warn(e.getMessage()); - } - } - destroy(); - } - - public Connector getConnector() { - return connector; - } - - public void setConnector(Connector connector) { - this.connector = connector; - } - - public BatchWriterConfig getBatchWriterConfig(){ - return batchWriterConfig; - } - - public void setBatchWriterConfig(BatchWriterConfig batchWriterConfig) { - this.batchWriterConfig = batchWriterConfig; - } - - protected MultiTableBatchWriter getMultiTableBatchWriter(){ - return mt_bw; - } - - public AccumuloRdfConfiguration getConf() { - return conf; - } - - public void setConf(AccumuloRdfConfiguration conf) { - this.conf = conf; - } - - public RyaTableMutationsFactory getRyaTableMutationsFactory() { - return ryaTableMutationsFactory; - } - - public void setRyaTableMutationsFactory(RyaTableMutationsFactory ryaTableMutationsFactory) { - this.ryaTableMutationsFactory = ryaTableMutationsFactory; - } - - public AccumuloRyaQueryEngine getQueryEngine() { - return queryEngine; - } - - public void setQueryEngine(AccumuloRyaQueryEngine queryEngine) { - this.queryEngine = queryEngine; - } - - protected String[] getTables() { - // core tables - List tableNames = Lists.newArrayList( - tableLayoutStrategy.getSpo(), - tableLayoutStrategy.getPo(), - tableLayoutStrategy.getOsp(), - tableLayoutStrategy.getNs(), - tableLayoutStrategy.getEval()); - - // Additional Tables - for (AccumuloIndexer index : secondaryIndexers) { - tableNames.add(index.getTableName()); - } - - return tableNames.toArray(new String[]{}); - } - - private void purge(String tableName, String[] auths) throws TableNotFoundException, MutationsRejectedException { - if (tableExists(tableName)) { - logger.info("Purging accumulo table: " + tableName); - BatchDeleter batchDeleter = createBatchDeleter(tableName, new Authorizations(auths)); - try { - batchDeleter.setRanges(Collections.singleton(new Range())); - batchDeleter.delete(); - } finally { - batchDeleter.close(); - } - } - } - - private void compact(String tableName) { - logger.info("Requesting major compaction for table " + tableName); - try { - connector.tableOperations().compact(tableName, null, null, true, false); - } catch (Exception e) { - logger.error(e.getMessage()); - } - } - - private boolean tableExists(String tableName) { - return getConnector().tableOperations().exists(tableName); - } - - private BatchDeleter createBatchDeleter(String tableName, Authorizations authorizations) throws TableNotFoundException { - return connector.createBatchDeleter(tableName, authorizations, NUM_THREADS, MAX_MEMORY, MAX_TIME, NUM_THREADS); - } - - private void checkVersion() throws RyaDAOException { - String version = getVersion(); - if (version == null) { - this.add(getVersionRyaStatement()); - } - //TODO: Do a version check here - } - - protected RyaStatement getVersionRyaStatement() { - return new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, VERSION_RYA); - } - - private void drop(String tableName) throws TableNotFoundException, AccumuloException, AccumuloSecurityException { - logger.info("Dropping cloudbase table: " + tableName); - connector.tableOperations().delete(tableName); - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java deleted file mode 100644 index b5a4e8401..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java +++ /dev/null @@ 
-1,152 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.rya.accumulo; - -// -//import com.google.common.io.ByteArrayDataOutput; -//import com.google.common.io.ByteStreams; -//import mvm.rya.api.RdfCloudTripleStoreUtils; -//import mvm.rya.api.domain.RangeValue; -//import org.apache.accumulo.core.data.Range; -//import org.apache.hadoop.io.Text; -//import org.openrdf.model.Value; -//import org.openrdf.model.ValueFactory; -//import org.openrdf.model.impl.ValueFactoryImpl; -// -//import java.io.IOException; -//import java.util.Map; -// -//import static mvm.rya.api.RdfCloudTripleStoreConstants.*; -//import static mvm.rya.api.RdfCloudTripleStoreUtils.CustomEntry; -// -///** -// * Class DefineTripleQueryRangeFactory -// * Date: Jun 2, 2011 -// * Time: 10:35:43 AM -// */ -//public class DefineTripleQueryRangeFactory { -// -// ValueFactory vf = ValueFactoryImpl.getInstance(); -// -// protected void fillRange(ByteArrayDataOutput startRowOut, ByteArrayDataOutput endRowOut, Value val, boolean empty) -// throws IOException { -// if(!empty) { -// startRowOut.write(DELIM_BYTES); -// endRowOut.write(DELIM_BYTES); -// } -// //null check? -// if(val instanceof RangeValue) { -// RangeValue rangeValue = (RangeValue) val; -// Value start = rangeValue.getStart(); -// Value end = rangeValue.getEnd(); -// byte[] start_val_bytes = RdfCloudTripleStoreUtils.writeValue(start); -// byte[] end_val_bytes = RdfCloudTripleStoreUtils.writeValue(end); -// startRowOut.write(start_val_bytes); -// endRowOut.write(end_val_bytes); -// } else { -// byte[] val_bytes = RdfCloudTripleStoreUtils.writeValue(val); -// startRowOut.write(val_bytes); -// endRowOut.write(val_bytes); -// } -// } -// -// public Map.Entry defineRange(Value subject, Value predicate, Value object, AccumuloRdfConfiguration conf) -// throws IOException { -// -// byte[] startrow, stoprow; -// ByteArrayDataOutput startRowOut = ByteStreams.newDataOutput(); -// ByteArrayDataOutput stopRowOut = ByteStreams.newDataOutput(); -// Range range; -// TABLE_LAYOUT tableLayout; -// -// if (subject != null) { -// /** -// * Case: s -// * Table: spo -// * Want this to be the first if statement since it will be most likely the most asked for table -// */ -// tableLayout = TABLE_LAYOUT.SPO; -// fillRange(startRowOut, stopRowOut, subject, true); -// if (predicate != null) { -// /** -// * Case: sp -// * Table: spo -// */ -// fillRange(startRowOut, stopRowOut, predicate, false); -// if (object != null) { -// /** -// * Case: spo -// * Table: spo -// */ -// fillRange(startRowOut, stopRowOut, object, false); -// } -// } else if (object != null) { -// /** -// * Case: so -// * Table: osp -// * Very rare case. 
Could have put this in the OSP if clause, but I wanted to reorder the if statement -// * for best performance. The SPO table probably gets the most scans, so I want it to be the first if -// * statement in the branch. -// */ -// tableLayout = TABLE_LAYOUT.OSP; -// startRowOut = ByteStreams.newDataOutput(); -// stopRowOut = ByteStreams.newDataOutput(); -// fillRange(startRowOut, stopRowOut, object, true); -// fillRange(startRowOut, stopRowOut, subject, false); -// } -// } else if (predicate != null) { -// /** -// * Case: p -// * Table: po -// * Wanted this to be the second if statement, since it will be the second most asked for table -// */ -// tableLayout = TABLE_LAYOUT.PO; -// fillRange(startRowOut, stopRowOut, predicate, true); -// if (object != null) { -// /** -// * Case: po -// * Table: po -// */ -// fillRange(startRowOut, stopRowOut, object, false); -// } -// } else if (object != null) { -// /** -// * Case: o -// * Table: osp -// * Probably a pretty rare scenario -// */ -// tableLayout = TABLE_LAYOUT.OSP; -// fillRange(startRowOut, stopRowOut, object, true); -// } else { -// tableLayout = TABLE_LAYOUT.SPO; -// stopRowOut.write(Byte.MAX_VALUE); -// } -// -// startrow = startRowOut.toByteArray(); -// stopRowOut.write(DELIM_STOP_BYTES); -// stoprow = stopRowOut.toByteArray(); -// Text startRowTxt = new Text(startrow); -// Text stopRowTxt = new Text(stoprow); -// range = new Range(startRowTxt, stopRowTxt); -// -// return new CustomEntry(tableLayout, range); -// } -// -//} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java deleted file mode 100644 index 574029eef..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java +++ /dev/null @@ -1,115 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE; - -import java.io.IOException; -import java.util.AbstractMap.SimpleEntry; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.hadoop.io.Text; - -public class RyaTableKeyValues { - public static final ColumnVisibility EMPTY_CV = new ColumnVisibility(); - public static final Text EMPTY_CV_TEXT = new Text(EMPTY_CV.getExpression()); - - RyaTripleContext instance; - - private RyaStatement stmt; - private Collection> spo = new ArrayList>(); - private Collection> po = new ArrayList>(); - private Collection> osp = new ArrayList>(); - - public RyaTableKeyValues(RyaStatement stmt, RdfCloudTripleStoreConfiguration conf) { - this.stmt = stmt; - this.instance = RyaTripleContext.getInstance(conf); - } - - public Collection> getSpo() { - return spo; - } - - public Collection> getPo() { - return po; - } - - public Collection> getOsp() { - return osp; - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - public RyaTableKeyValues invoke() throws IOException { - /** - * TODO: If there are contexts, do we still replicate the information into the default graph as well - * as the named graphs? - */try { - Map rowMap = instance.serializeTriple(stmt); - TripleRow tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - byte[] columnVisibility = tripleRow.getColumnVisibility(); - Text cv = columnVisibility == null ? EMPTY_CV_TEXT : new Text(columnVisibility); - Long timestamp = tripleRow.getTimestamp(); - timestamp = timestamp == null ? 0l : timestamp; - byte[] value = tripleRow.getValue(); - Value v = value == null ? EMPTY_VALUE : new Value(value); - spo.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()), - new Text(tripleRow.getColumnFamily()), - new Text(tripleRow.getColumnQualifier()), - cv, timestamp), v)); - tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - po.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()), - new Text(tripleRow.getColumnFamily()), - new Text(tripleRow.getColumnQualifier()), - cv, timestamp), v)); - tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); - osp.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()), - new Text(tripleRow.getColumnFamily()), - new Text(tripleRow.getColumnQualifier()), - cv, timestamp), v)); - } catch (TripleRowResolverException e) { - throw new IOException(e); - } - return this; - } - - @Override - public String toString() { - return "RyaTableKeyValues{" + - "statement=" + stmt + - ", spo=" + spo + - ", po=" + po + - ", o=" + osp + - '}'; - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java deleted file mode 100644 index 0dbafc128..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java +++ /dev/null @@ -1,102 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
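RyaTableKeyValues above turns one RyaStatement into a Key/Value pair per index layout, substituting well-known defaults wherever a TripleRow field is null. The defaulting, pulled out as a sketch (TripleRow accessors and the EMPTY_* constants as in the deleted code; `row` is an assumed local):

    // Sketch: null-defaulting when converting a TripleRow into an Accumulo Key/Value.
    Text cv = row.getColumnVisibility() == null ? EMPTY_CV_TEXT : new Text(row.getColumnVisibility());
    long ts = row.getTimestamp() == null ? 0L : row.getTimestamp();   // missing timestamp becomes 0
    Value val = row.getValue() == null ? EMPTY_VALUE : new Value(row.getValue());
    Key key = new Key(new Text(row.getRow()), new Text(row.getColumnFamily()),
                      new Text(row.getColumnQualifier()), cv, ts);

Note that invoke() computes these defaults once from the SPO row and reuses them for the PO and OSP entries, which is safe only because all three rows are serialized from the same statement.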
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV; -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.hadoop.io.Text; - -public class RyaTableMutationsFactory { - - RyaTripleContext ryaContext; - - public RyaTableMutationsFactory(RyaTripleContext ryaContext) { - this.ryaContext = ryaContext; - } - - //TODO: Does this still need to be collections - public Map> serialize( - RyaStatement stmt) throws IOException { - - Collection spo_muts = new ArrayList(); - Collection po_muts = new ArrayList(); - Collection osp_muts = new ArrayList(); - /** - * TODO: If there are contexts, do we still replicate the information into the default graph as well - * as the named graphs? - */ - try { - Map rowMap = ryaContext.serializeTriple(stmt); - TripleRow tripleRow = rowMap.get(TABLE_LAYOUT.SPO); - spo_muts.add(createMutation(tripleRow)); - tripleRow = rowMap.get(TABLE_LAYOUT.PO); - po_muts.add(createMutation(tripleRow)); - tripleRow = rowMap.get(TABLE_LAYOUT.OSP); - osp_muts.add(createMutation(tripleRow)); - } catch (TripleRowResolverException fe) { - throw new IOException(fe); - } - - Map> mutations = - new HashMap>(); - mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, spo_muts); - mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO, po_muts); - mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP, osp_muts); - - return mutations; - } - - protected Mutation createMutation(TripleRow tripleRow) { - Mutation mutation = new Mutation(new Text(tripleRow.getRow())); - byte[] columnVisibility = tripleRow.getColumnVisibility(); - ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility); - Long timestamp = tripleRow.getTimestamp(); - byte[] value = tripleRow.getValue(); - Value v = value == null ? EMPTY_VALUE : new Value(value); - byte[] columnQualifier = tripleRow.getColumnQualifier(); - Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier); - byte[] columnFamily = tripleRow.getColumnFamily(); - Text cfText = columnFamily == null ? 
EMPTY_TEXT : new Text(columnFamily); - - mutation.put(cfText, cqText, cv, timestamp, v); - return mutation; - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java deleted file mode 100644 index 5df5da96b..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java +++ /dev/null @@ -1,59 +0,0 @@ -package mvm.rya.accumulo.experimental; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.IOException; -import java.util.Collection; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; - -import org.apache.accumulo.core.client.MultiTableBatchWriter; - -public abstract class AbstractAccumuloIndexer implements AccumuloIndexer { - - @Override - public void setMultiTableBatchWriter(MultiTableBatchWriter writer) throws IOException { - } - - @Override - public void storeStatements(Collection statements) throws IOException { - for (RyaStatement s : statements) { - storeStatement(s); - } - } - - @Override - public void deleteStatement(RyaStatement stmt) throws IOException { - } - - @Override - public void dropGraph(RyaURI... graphs) { - } - - @Override - public void flush() throws IOException { - } - - @Override - public void close() throws IOException { - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java deleted file mode 100644 index 232983171..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java +++ /dev/null @@ -1,33 +0,0 @@ -package mvm.rya.accumulo.experimental; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
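RyaTableMutationsFactory, deleted above, is the write-side counterpart: serialize the statement once per layout, wrap each TripleRow in a Mutation, and hand the three collections to the three index tables. A minimal consumer sketch (the connector, configuration, statement, and table names are assumptions for illustration):

    // Sketch: flushing the factory's per-layout mutations through a MultiTableBatchWriter.
    MultiTableBatchWriter writer = connector.createMultiTableBatchWriter(new BatchWriterConfig());
    RyaTableMutationsFactory factory = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf));
    Map<TABLE_LAYOUT, Collection<Mutation>> muts = factory.serialize(stmt);
    writer.getBatchWriter("rya_spo").addMutations(muts.get(TABLE_LAYOUT.SPO));
    writer.getBatchWriter("rya_po").addMutations(muts.get(TABLE_LAYOUT.PO));
    writer.getBatchWriter("rya_osp").addMutations(muts.get(TABLE_LAYOUT.OSP));
    writer.flush();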
- */ - - -import java.io.IOException; - -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.apache.accumulo.core.client.MultiTableBatchWriter; - -public interface AccumuloIndexer extends RyaSecondaryIndexer { - - public void setMultiTableBatchWriter(MultiTableBatchWriter writer) throws IOException; - -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java deleted file mode 100644 index 000c08a45..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java +++ /dev/null @@ -1,164 +0,0 @@ -package mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.accumulo.AccumuloRdfConstants; -import mvm.rya.accumulo.mr.utils.AccumuloHDFSFileInputFormat; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreUtils; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.iterators.user.AgeOffFilter; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.Job; - -/** - */ -public abstract class AbstractAccumuloMRTool { - - protected Configuration conf; - protected RdfCloudTripleStoreConstants.TABLE_LAYOUT rdfTableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - protected String userName = "root"; - protected String pwd = "root"; - protected String instance = "instance"; - protected String zk = "zoo"; - protected Authorizations authorizations = AccumuloRdfConstants.ALL_AUTHORIZATIONS; - protected String ttl = null; - protected boolean mock = false; - protected boolean hdfsInput = false; - protected String tablePrefix = RdfCloudTripleStoreConstants.TBL_PRFX_DEF; - - protected void init() { - zk = conf.get(MRUtils.AC_ZK_PROP, zk); - ttl = conf.get(MRUtils.AC_TTL_PROP, ttl); - instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance); - userName = conf.get(MRUtils.AC_USERNAME_PROP, userName); - pwd = conf.get(MRUtils.AC_PWD_PROP, pwd); - mock = conf.getBoolean(MRUtils.AC_MOCK_PROP, mock); - hdfsInput = conf.getBoolean(MRUtils.AC_HDFS_INPUT_PROP, hdfsInput); - tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, tablePrefix); - if (tablePrefix != null) - RdfCloudTripleStoreConstants.prefixTables(tablePrefix); - rdfTableLayout = 
RdfCloudTripleStoreConstants.TABLE_LAYOUT.valueOf( - conf.get(MRUtils.TABLE_LAYOUT_PROP, RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP.toString())); - String auth = conf.get(MRUtils.AC_AUTH_PROP); - if (auth != null) - authorizations = new Authorizations(auth.split(",")); - - if (!mock) { - conf.setBoolean("mapred.map.tasks.speculative.execution", false); - conf.setBoolean("mapred.reduce.tasks.speculative.execution", false); - conf.set("io.sort.mb", "256"); - } - - //set ttl - ttl = conf.get(MRUtils.AC_TTL_PROP); - } - - protected void setupInputFormat(Job job) throws AccumuloSecurityException { - // set up accumulo input - if (!hdfsInput) { - job.setInputFormatClass(AccumuloInputFormat.class); - } else { - job.setInputFormatClass(AccumuloHDFSFileInputFormat.class); - } - AccumuloInputFormat.setConnectorInfo(job, userName, new PasswordToken(pwd)); - AccumuloInputFormat.setInputTableName(job, RdfCloudTripleStoreUtils.layoutPrefixToTable(rdfTableLayout, tablePrefix)); - AccumuloInputFormat.setScanAuthorizations(job, authorizations); - if (!mock) { - AccumuloInputFormat.setZooKeeperInstance(job, instance, zk); - } else { - AccumuloInputFormat.setMockInstance(job, instance); - } - if (ttl != null) { - IteratorSetting setting = new IteratorSetting(1, "fi", AgeOffFilter.class.getName()); - AgeOffFilter.setTTL(setting, Long.valueOf(ttl)); - AccumuloInputFormat.addIterator(job, setting); - } - } - - protected void setupOutputFormat(Job job, String outputTable) throws AccumuloSecurityException { - AccumuloOutputFormat.setConnectorInfo(job, userName, new PasswordToken(pwd)); - AccumuloOutputFormat.setCreateTables(job, true); - AccumuloOutputFormat.setDefaultTableName(job, outputTable); - if (!mock) { - AccumuloOutputFormat.setZooKeeperInstance(job, instance, zk); - } else { - AccumuloOutputFormat.setMockInstance(job, instance); - } - job.setOutputFormatClass(AccumuloOutputFormat.class); - } - - public void setConf(Configuration configuration) { - this.conf = configuration; - } - - public Configuration getConf() { - return conf; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - public String getPwd() { - return pwd; - } - - public void setPwd(String pwd) { - this.pwd = pwd; - } - - public String getZk() { - return zk; - } - - public void setZk(String zk) { - this.zk = zk; - } - - public String getTtl() { - return ttl; - } - - public void setTtl(String ttl) { - this.ttl = ttl; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java deleted file mode 100644 index ee1004d48..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java +++ /dev/null @@ -1,258 +0,0 @@ -package mvm.rya.accumulo.mr.eval; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
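AbstractAccumuloMRTool above factors the repetitive Accumulo wiring out of the MapReduce tools that follow: init() pulls connection settings out of the Configuration (the MRUtils.AC_* keys), setupInputFormat() selects AccumuloInputFormat or the HDFS-file variant and attaches an AgeOffFilter iterator when a TTL is configured, and setupOutputFormat() points AccumuloOutputFormat at a target table. A concrete tool is expected to drive it roughly like this (a sketch; the class and output table name are hypothetical):

    // Sketch: the intended call sequence for a tool built on AbstractAccumuloMRTool.
    public class ExampleTool extends AbstractAccumuloMRTool implements Tool {
        @Override
        public int run(String[] args) throws Exception {
            init();                              // read zk/instance/user/password/ttl from conf
            Job job = new Job(conf);             // old-style constructor, as used in this code base
            job.setJarByClass(ExampleTool.class);
            setupInputFormat(job);               // Accumulo or HDFS input, TTL iterator if configured
            setupOutputFormat(job, "rya_eval");  // hypothetical output table
            // ... set mapper/reducer and key/value classes here ...
            return job.waitForCompletion(true) ? 0 : 1;
        }
    }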
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.Date; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRdfConstants; -import mvm.rya.accumulo.mr.AbstractAccumuloMRTool; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -import com.google.common.collect.Lists; -import com.google.common.io.ByteArrayDataInput; -import com.google.common.io.ByteArrayDataOutput; -import com.google.common.io.ByteStreams; - -/** - * Count subject, predicate, object. Save in table - * Class RdfCloudTripleStoreCountTool - * Date: Apr 12, 2011 - * Time: 10:39:40 AM - * @deprecated - */ -public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool { - - public static void main(String[] args) { - try { - - ToolRunner.run(new Configuration(), new AccumuloRdfCountTool(), args); - } catch (Exception e) { - e.printStackTrace(); - } - } - - /** - * cloudbase props - */ - - @Override - public int run(String[] strings) throws Exception { - conf.set(MRUtils.JOB_NAME_PROP, "Gather Evaluation Statistics"); - - //initialize - init(); - - Job job = new Job(conf); - job.setJarByClass(AccumuloRdfCountTool.class); - setupInputFormat(job); - - AccumuloInputFormat.setRanges(job, Lists.newArrayList(new Range(new Text(new byte[]{}), new Text(new byte[]{Byte.MAX_VALUE})))); - // set input output of the particular job - job.setMapOutputKeyClass(Text.class); - job.setMapOutputValueClass(LongWritable.class); - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(Mutation.class); - - // set mapper and reducer classes - job.setMapperClass(CountPiecesMapper.class); - job.setCombinerClass(CountPiecesCombiner.class); - job.setReducerClass(CountPiecesReducer.class); - - String outputTable = tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX; - setupOutputFormat(job, outputTable); - - // Submit the job - Date startTime = new Date(); - System.out.println("Job started: " + startTime); - int exitCode = job.waitForCompletion(true) ? 
0 : 1; - - if (exitCode == 0) { - Date end_time = new Date(); - System.out.println("Job ended: " + end_time); - System.out.println("The job took " - + (end_time.getTime() - startTime.getTime()) / 1000 - + " seconds."); - return 0; - } else { - System.out.println("Job Failed!!!"); - } - - return -1; - } - - public static class CountPiecesMapper extends Mapper { - - public static final byte[] EMPTY_BYTES = new byte[0]; - private RdfCloudTripleStoreConstants.TABLE_LAYOUT tableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP; - - ValueFactoryImpl vf = new ValueFactoryImpl(); - - private Text keyOut = new Text(); - private LongWritable valOut = new LongWritable(1); - private RyaTripleContext ryaContext; - - @Override - protected void setup(Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - tableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.valueOf( - conf.get(MRUtils.TABLE_LAYOUT_PROP, RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP.toString())); - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf)); - } - - @Override - protected void map(Key key, Value value, Context context) throws IOException, InterruptedException { - try { - RyaStatement statement = ryaContext.deserializeTriple(tableLayout, new TripleRow(key.getRow().getBytes(), key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes())); - //count each piece subject, pred, object - - String subj = statement.getSubject().getData(); - String pred = statement.getPredicate().getData(); -// byte[] objBytes = tripleFormat.getValueFormat().serialize(statement.getObject()); - RyaURI scontext = statement.getContext(); - boolean includesContext = scontext != null; - String scontext_str = (includesContext) ? 
scontext.getData() : null; - - ByteArrayDataOutput output = ByteStreams.newDataOutput(); - output.writeUTF(subj); - output.writeUTF(RdfCloudTripleStoreConstants.SUBJECT_CF); - output.writeBoolean(includesContext); - if (includesContext) - output.writeUTF(scontext_str); - keyOut.set(output.toByteArray()); - context.write(keyOut, valOut); - - output = ByteStreams.newDataOutput(); - output.writeUTF(pred); - output.writeUTF(RdfCloudTripleStoreConstants.PRED_CF); - output.writeBoolean(includesContext); - if (includesContext) - output.writeUTF(scontext_str); - keyOut.set(output.toByteArray()); - context.write(keyOut, valOut); - } catch (TripleRowResolverException e) { - throw new IOException(e); - } - } - } - - public static class CountPiecesCombiner extends Reducer { - - private LongWritable valOut = new LongWritable(); - - // TODO: can still add up to be large I guess - // any count lower than this does not need to be saved - public static final int TOO_LOW = 2; - - @Override - protected void reduce(Text key, Iterable values, Context context) throws IOException, InterruptedException { - long count = 0; - for (LongWritable lw : values) { - count += lw.get(); - } - - if (count <= TOO_LOW) - return; - - valOut.set(count); - context.write(key, valOut); - } - - } - - public static class CountPiecesReducer extends Reducer { - - Text row = new Text(); - Text cat_txt = new Text(); - Value v_out = new Value(); - ValueFactory vf = new ValueFactoryImpl(); - - // any count lower than this does not need to be saved - public static final int TOO_LOW = 10; - private String tablePrefix; - protected Text table; - private ColumnVisibility cv = AccumuloRdfConstants.EMPTY_CV; - - @Override - protected void setup(Context context) throws IOException, InterruptedException { - super.setup(context); - tablePrefix = context.getConfiguration().get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF); - table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - final String cv_s = context.getConfiguration().get(MRUtils.AC_CV_PROP); - if (cv_s != null) - cv = new ColumnVisibility(cv_s); - } - - @Override - protected void reduce(Text key, Iterable values, Context context) throws IOException, InterruptedException { - long count = 0; - for (LongWritable lw : values) { - count += lw.get(); - } - - if (count <= TOO_LOW) - return; - - ByteArrayDataInput badi = ByteStreams.newDataInput(key.getBytes()); - String v = badi.readUTF(); - cat_txt.set(badi.readUTF()); - - Text columnQualifier = RdfCloudTripleStoreConstants.EMPTY_TEXT; - boolean includesContext = badi.readBoolean(); - if (includesContext) { - columnQualifier = new Text(badi.readUTF()); - } - - row.set(v); - Mutation m = new Mutation(row); - v_out.set((count + "").getBytes()); - m.put(cat_txt, columnQualifier, cv, v_out); - context.write(table, m); - } - - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java deleted file mode 100644 index c3ddcfd70..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java +++ /dev/null @@ -1,369 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
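One detail of AccumuloRdfCountTool worth flagging: CountPiecesCombiner discards any partial sum of 2 or less, but a combiner only ever sees a fragment of a key's values, so a piece whose fragments each stay under that cutoff can be undercounted or lost even when its global count clears the reducer's threshold of 10 (the "can still add up to be large" TODO hints at this). A semantics-preserving combiner sums and never drops (a sketch; hypothetical class name, same types as the deleted job):

    // Sketch: a lossless sum-only combiner; all thresholding stays in the reducer.
    public static class SumCombiner extends Reducer<Text, LongWritable, Text, LongWritable> {
        private final LongWritable out = new LongWritable();
        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            long count = 0;
            for (LongWritable lw : values) {
                count += lw.get();
            }
            out.set(count);
            context.write(key, out);   // never filters, so combining stays associative
        }
    }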
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static com.google.common.base.Preconditions.checkNotNull; -import static mvm.rya.accumulo.AccumuloRdfUtils.extractValue; -import static mvm.rya.accumulo.AccumuloRdfUtils.from; - -import java.io.BufferedOutputStream; -import java.io.IOException; -import java.io.PrintStream; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRdfConstants; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolver; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat; -import org.apache.accumulo.core.client.mapreduce.lib.partition.KeyRangePartitioner; -import org.apache.accumulo.core.client.mapreduce.lib.partition.RangePartitioner; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.util.TextUtil; -import org.apache.commons.codec.binary.Base64; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.openrdf.model.Statement; -import org.openrdf.rio.ParserConfig; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.nquads.NQuadsParser; - -import com.google.common.base.Preconditions; - -/** - * Take large ntrips files and use MapReduce and Cloudbase - * Bulk ingest techniques to load into the table in our partition format. - *

- * Input: NTrips file - * Map: - * - key : shard row - Text - * - value : stmt in doc triple format - Text - * Partitioner: RangePartitioner - * Reduce: - * - key : all the entries for each triple - Cloudbase Key - * Class BulkNtripsInputTool - * Date: Sep 13, 2011 - * Time: 10:00:17 AM - */ -public class BulkNtripsInputTool extends Configured implements Tool { - - public static final String WORKDIR_PROP = "bulk.n3.workdir"; - - private String userName = "root"; - private String pwd = "root"; - private String instance = "isntance"; - private String zk = "zoo"; - private String ttl = null; - private String workDirBase = "/temp/bulkcb/work"; - private String format = RDFFormat.NQUADS.getName(); - - @Override - public int run(final String[] args) throws Exception { - final Configuration conf = getConf(); - try { - //conf - zk = conf.get(MRUtils.AC_ZK_PROP, zk); - ttl = conf.get(MRUtils.AC_TTL_PROP, ttl); - instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance); - userName = conf.get(MRUtils.AC_USERNAME_PROP, userName); - pwd = conf.get(MRUtils.AC_PWD_PROP, pwd); - workDirBase = conf.get(WORKDIR_PROP, workDirBase); - format = conf.get(MRUtils.FORMAT_PROP, format); - conf.set(MRUtils.FORMAT_PROP, format); - final String inputDir = args[0]; - - ZooKeeperInstance zooKeeperInstance = new ZooKeeperInstance(instance, zk); - Connector connector = zooKeeperInstance.getConnector(userName, new PasswordToken(pwd)); - TableOperations tableOperations = connector.tableOperations(); - - if (conf.get(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS) != null ) { - throw new IllegalArgumentException("Cannot use Bulk N Trips tool with Additional Indexers"); - } - - String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null); - if (tablePrefix != null) - RdfCloudTripleStoreConstants.prefixTables(tablePrefix); - String[] tables = {tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, - tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, - tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX}; - Collection jobs = new ArrayList(); - for (final String tableName : tables) { - PrintStream out = null; - try { - String workDir = workDirBase + "/" + tableName; - System.out.println("Loading data into table[" + tableName + "]"); - - Job job = new Job(new Configuration(conf), "Bulk Ingest load data to Generic RDF Table[" + tableName + "]"); - job.setJarByClass(this.getClass()); - //setting long job - Configuration jobConf = job.getConfiguration(); - jobConf.setBoolean("mapred.map.tasks.speculative.execution", false); - jobConf.setBoolean("mapred.reduce.tasks.speculative.execution", false); - jobConf.set("io.sort.mb", jobConf.get("io.sort.mb", "256")); - jobConf.setBoolean("mapred.compress.map.output", true); -// jobConf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); //TODO: I would like LZO compression - - job.setInputFormatClass(TextInputFormat.class); - - job.setMapperClass(ParseNtripsMapper.class); - job.setMapOutputKeyClass(Key.class); - job.setMapOutputValueClass(Value.class); - - job.setCombinerClass(OutStmtMutationsReducer.class); - job.setReducerClass(OutStmtMutationsReducer.class); - job.setOutputFormatClass(AccumuloFileOutputFormat.class); - // AccumuloFileOutputFormat.setZooKeeperInstance(jobConf, instance, zk); - - jobConf.set(ParseNtripsMapper.TABLE_PROPERTY, tableName); - - TextInputFormat.setInputPaths(job, new Path(inputDir)); - - FileSystem fs = FileSystem.get(conf); - Path workPath = new Path(workDir); - if (fs.exists(workPath)) - 
fs.delete(workPath, true); - - //make failures dir - Path failures = new Path(workDir, "failures"); - fs.delete(failures, true); - fs.mkdirs(new Path(workDir, "failures")); - - AccumuloFileOutputFormat.setOutputPath(job, new Path(workDir + "/files")); - - out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt")))); - - if (!tableOperations.exists(tableName)) - tableOperations.create(tableName); - Collection splits = tableOperations.getSplits(tableName, Integer.MAX_VALUE); - for (Text split : splits) - out.println(new String(Base64.encodeBase64(TextUtil.getBytes(split)))); - - job.setNumReduceTasks(splits.size() + 1); - out.close(); - - job.setPartitionerClass(KeyRangePartitioner.class); - RangePartitioner.setSplitFile(job, workDir + "/splits.txt"); - - jobConf.set(WORKDIR_PROP, workDir); - - job.submit(); - jobs.add(job); - - } catch (Exception re) { - throw new RuntimeException(re); - } finally { - if (out != null) - out.close(); - } - } - - for (Job job : jobs) { - while (!job.isComplete()) { - Thread.sleep(1000); - } - } - - for(String tableName : tables) { - String workDir = workDirBase + "/" + tableName; - String filesDir = workDir + "/files"; - String failuresDir = workDir + "/failures"; - - FileSystem fs = FileSystem.get(conf); - - //make sure that the "accumulo" user can read/write/execute into these directories this path - fs.setPermission(new Path(filesDir), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - fs.setPermission(new Path(failuresDir), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); - - tableOperations.importDirectory( - tableName, - filesDir, - failuresDir, - false); - - } - - } catch (Exception e ){ - throw new RuntimeException(e); - } - - return 0; - } - - public static void main(String[] args) throws Exception { - ToolRunner.run(new Configuration(), new BulkNtripsInputTool(), args); - } - - /** - * input: ntrips format triple - *

- * output: key: shard row from generator - * value: stmt in serialized format (document format) - */ - public static class ParseNtripsMapper extends Mapper { - public static final String TABLE_PROPERTY = "parsentripsmapper.table"; - - private RDFParser parser; - private String rdfFormat; - private String namedGraph; - private RyaTripleContext ryaContext; - private TripleRowResolver rowResolver; - - @Override - protected void setup(final Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - final String table = conf.get(TABLE_PROPERTY); - Preconditions.checkNotNull(table, "Set the " + TABLE_PROPERTY + " property on the map reduce job"); - this.ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf)); - rowResolver = ryaContext.getTripleResolver(); - - final String cv_s = conf.get(MRUtils.AC_CV_PROP); - final byte[] cv = cv_s == null ? null : cv_s.getBytes(); - rdfFormat = conf.get(MRUtils.FORMAT_PROP); - checkNotNull(rdfFormat, "Rdf format cannot be null"); - - namedGraph = conf.get(MRUtils.NAMED_GRAPH_PROP); - - parser = new NQuadsParser(); - parser.setParserConfig(new ParserConfig(true, true, true, RDFParser.DatatypeHandling.VERIFY)); - parser.setRDFHandler(new RDFHandler() { - - @Override - public void startRDF() throws RDFHandlerException { - - } - - @Override - public void endRDF() throws RDFHandlerException { - - } - - @Override - public void handleNamespace(String s, String s1) throws RDFHandlerException { - - } - - @Override - public void handleStatement(Statement statement) throws RDFHandlerException { - try { - RyaStatement rs = RdfToRyaConversions.convertStatement(statement); - if(rs.getColumnVisibility() == null) { - rs.setColumnVisibility(cv); - } - - // Inject the specified context into the statement. - if(namedGraph != null){ - rs.setContext(new RyaURI(namedGraph)); - } else if (statement.getContext() != null) { - rs.setContext(new RyaURI(statement.getContext().toString())); - } - - Map serialize = rowResolver.serialize(rs); - - if (table.contains(RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX)) { - TripleRow tripleRow = serialize.get(TABLE_LAYOUT.SPO); - context.write( - from(tripleRow), - extractValue(tripleRow) - ); - } else if (table.contains(RdfCloudTripleStoreConstants.TBL_PO_SUFFIX)) { - TripleRow tripleRow = serialize.get(TABLE_LAYOUT.PO); - context.write( - from(tripleRow), - extractValue(tripleRow) - ); - } else if (table.contains(RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX)) { - TripleRow tripleRow = serialize.get(TABLE_LAYOUT.OSP); - context.write( - from(tripleRow), - extractValue(tripleRow) - ); - } else - throw new IllegalArgumentException("Unrecognized table[" + table + "]"); - - } catch (Exception e) { - throw new RDFHandlerException(e); - } - } - - @Override - public void handleComment(String s) throws RDFHandlerException { - - } - }); - } - - @Override - public void map(LongWritable key, Text value, Context output) - throws IOException, InterruptedException { - String rdf = value.toString(); - try { - parser.parse(new StringReader(rdf), ""); - } catch (RDFParseException e) { - System.out.println("Line[" + rdf + "] cannot be formatted with format[" + rdfFormat + "]. 
Exception[" + e.getMessage() + "]"); - } catch (Exception e) { - e.printStackTrace(); - throw new IOException("Exception occurred parsing triple[" + rdf + "]"); - } - } - } - - public static class OutStmtMutationsReducer extends Reducer { - - public void reduce(Key key, Iterable values, Context output) - throws IOException, InterruptedException { - output.write(key, AccumuloRdfConstants.EMPTY_VALUE); - } - } -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java deleted file mode 100644 index 5a872a041..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java +++ /dev/null @@ -1,251 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.io.StringReader; -import java.util.Collection; -import java.util.Date; -import java.util.Map; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRdfConstants; -import mvm.rya.accumulo.RyaTableMutationsFactory; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaTripleContext; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Mutation; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.openrdf.model.Statement; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; - -/** - * Do bulk import of rdf files - * Class RdfFileInputTool2 - * Date: May 16, 2011 - * Time: 3:12:16 PM - */ -public class RdfFileInputByLineTool implements Tool { - - private Configuration conf = new Configuration(); - - private String userName = "root"; - private String pwd = "password"; - private String instance = "instance"; - private String zk = "zoo"; - private String tablePrefix = null; - private RDFFormat format = RDFFormat.NTRIPLES; - 
- public Configuration getConf() { - return conf; - } - - public void setConf(Configuration conf) { - this.conf = conf; - } - - public static void main(String[] args) { - try { - ToolRunner.run(new Configuration(), new RdfFileInputByLineTool(), args); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException, AccumuloSecurityException { - conf.setBoolean("mapred.map.tasks.speculative.execution", false); - conf.setBoolean("mapred.reduce.tasks.speculative.execution", false); - conf.set("io.sort.mb", "256"); - conf.setLong("mapred.task.timeout", 600000000); - - zk = conf.get(MRUtils.AC_ZK_PROP, zk); - instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance); - userName = conf.get(MRUtils.AC_USERNAME_PROP, userName); - pwd = conf.get(MRUtils.AC_PWD_PROP, pwd); - format = RDFFormat.valueOf(conf.get(MRUtils.FORMAT_PROP, RDFFormat.NTRIPLES.toString())); - - String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF); - - Job job = new Job(conf); - job.setJarByClass(RdfFileInputByLineTool.class); - - // set up cloudbase input - job.setInputFormatClass(TextInputFormat.class); - FileInputFormat.addInputPath(job, new Path(args[0])); - - // set input output of the particular job - job.setMapOutputKeyClass(Text.class); - job.setMapOutputValueClass(Mutation.class); - - job.setOutputFormatClass(AccumuloOutputFormat.class); - AccumuloOutputFormat.setConnectorInfo(job, userName, new PasswordToken(pwd.getBytes())); - AccumuloOutputFormat.setCreateTables(job, true); - AccumuloOutputFormat.setDefaultTableName(job, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - AccumuloOutputFormat.setZooKeeperInstance(job, instance, zk); - - // set mapper and reducer classes - job.setMapperClass(TextToMutationMapper.class); - job.setNumReduceTasks(0); - - // Submit the job - Date startTime = new Date(); - System.out.println("Job started: " + startTime); - int exitCode = job.waitForCompletion(true) ? 
0 : 1; - - if (exitCode == 0) { - Date end_time = new Date(); - System.out.println("Job ended: " + end_time); - System.out.println("The job took " - + (end_time.getTime() - startTime.getTime()) / 1000 - + " seconds."); - return job - .getCounters() - .findCounter("org.apache.hadoop.mapred.Task$Counter", - "REDUCE_OUTPUT_RECORDS").getValue(); - } else { - System.out.println("Job Failed!!!"); - } - - return -1; - } - - @Override - public int run(String[] args) throws Exception { - return (int) runJob(args); - } - - public static class TextToMutationMapper extends Mapper { - protected RDFParser parser; - private String prefix; - private RDFFormat rdfFormat; - protected Text spo_table; - private Text po_table; - private Text osp_table; - private byte[] cv = AccumuloRdfConstants.EMPTY_CV.getExpression(); - - public TextToMutationMapper() { - } - - @Override - protected void setup(final Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - prefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null); - if (prefix != null) { - RdfCloudTripleStoreConstants.prefixTables(prefix); - } - - spo_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - po_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - osp_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - - final String cv_s = conf.get(MRUtils.AC_CV_PROP); - if (cv_s != null) - cv = cv_s.getBytes(); - - rdfFormat = RDFFormat.valueOf(conf.get(MRUtils.FORMAT_PROP, RDFFormat.NTRIPLES.toString())); - parser = Rio.createParser(rdfFormat); - RyaTripleContext tripleContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf)); - final RyaTableMutationsFactory mut = new RyaTableMutationsFactory(tripleContext); - - parser.setRDFHandler(new RDFHandler() { - - @Override - public void startRDF() throws RDFHandlerException { - - } - - @Override - public void endRDF() throws RDFHandlerException { - - } - - @Override - public void handleNamespace(String s, String s1) throws RDFHandlerException { - - } - - @Override - public void handleStatement(Statement statement) throws RDFHandlerException { - try { - RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement); - if(ryaStatement.getColumnVisibility() == null) { - ryaStatement.setColumnVisibility(cv); - } - Map> mutationMap = - mut.serialize(ryaStatement); - Collection spo = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - Collection po = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - Collection osp = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); - - for (Mutation m : spo) { - context.write(spo_table, m); - } - for (Mutation m : po) { - context.write(po_table, m); - } - for (Mutation m : osp) { - context.write(osp_table, m); - } - } catch (Exception e) { - throw new RDFHandlerException(e); - } - } - - @Override - public void handleComment(String s) throws RDFHandlerException { - - } - }); - } - - @Override - protected void map(LongWritable key, Text value, final Context context) throws IOException, InterruptedException { - try { - parser.parse(new StringReader(value.toString()), ""); - } catch (Exception e) { - throw new IOException(e); - } - } - - } -} - diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java deleted file mode 100644 index f20dfe3f2..000000000 --- 
a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java +++ /dev/null @@ -1,146 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.domain.utils.RyaStatementWritable; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaTripleContext; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.RecordReader; -import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import org.apache.hadoop.mapreduce.lib.input.FileSplit; -import org.openrdf.model.Statement; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; - -/** - * Be able to input multiple rdf formatted files. Convert from rdf format to statements. 
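RdfFileInputByLineTool's mapper above treats every input line as a complete N-Triples document, which is what makes plain TextInputFormat splits safe for this format. The per-line parse, reduced to a helper (a sketch assuming Sesame's Rio with a StatementCollector; the method name is hypothetical):

    // Sketch: parse one N-Triples line into Statements with Sesame/openrdf Rio.
    List<Statement> parseLine(String line) throws IOException {
        List<Statement> statements = new ArrayList<>();
        RDFParser parser = Rio.createParser(RDFFormat.NTRIPLES);
        parser.setRDFHandler(new StatementCollector(statements));  // collect instead of callbacks
        try {
            parser.parse(new StringReader(line), "");              // empty base URI, as in the mapper
        } catch (RDFParseException | RDFHandlerException e) {
            throw new IOException("Cannot parse line: " + line, e);
        }
        return statements;
    }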
- * Class RdfFileInputFormat - * Date: May 16, 2011 - * Time: 2:11:24 PM - */ -public class RdfFileInputFormat extends FileInputFormat { - - @Override - public RecordReader createRecordReader(InputSplit inputSplit, - TaskAttemptContext taskAttemptContext) - throws IOException, InterruptedException { - return new RdfFileRecordReader(); - } - - private class RdfFileRecordReader extends RecordReader implements RDFHandler { - - boolean closed = false; - long count = 0; - BlockingQueue queue = new LinkedBlockingQueue(); - int total = 0; - private RyaTripleContext tripleContext; - - - @Override - public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException { - FileSplit fileSplit = (FileSplit) inputSplit; - Configuration conf = taskAttemptContext.getConfiguration(); - String rdfForm_s = conf.get(MRUtils.FORMAT_PROP, RDFFormat.RDFXML.getName()); - RDFFormat rdfFormat = RDFFormat.valueOf(rdfForm_s); - tripleContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf)); - - Path file = fileSplit.getPath(); - FileSystem fs = file.getFileSystem(conf); - FSDataInputStream fileIn = fs.open(fileSplit.getPath()); - - RDFParser rdfParser = Rio.createParser(rdfFormat); - rdfParser.setRDFHandler(this); - try { - rdfParser.parse(fileIn, ""); - } catch (Exception e) { - throw new IOException(e); - } - fileIn.close(); - total = queue.size(); - //TODO: Make this threaded so that you don't hold too many statements before sending them - } - - @Override - public boolean nextKeyValue() throws IOException, InterruptedException { - return queue.size() > 0; - } - - @Override - public LongWritable getCurrentKey() throws IOException, InterruptedException { - return new LongWritable(count++); - } - - @Override - public RyaStatementWritable getCurrentValue() throws IOException, InterruptedException { - return queue.poll(); - } - - @Override - public float getProgress() throws IOException, InterruptedException { - return ((float) (total - queue.size())) / ((float) total); - } - - @Override - public void close() throws IOException { - closed = true; - } - - @Override - public void startRDF() throws RDFHandlerException { - } - - @Override - public void endRDF() throws RDFHandlerException { - } - - @Override - public void handleNamespace(String s, String s1) throws RDFHandlerException { - } - - @Override - public void handleStatement(Statement statement) throws RDFHandlerException { - queue.add(new RyaStatementWritable(RdfToRyaConversions.convertStatement(statement), tripleContext)); - } - - @Override - public void handleComment(String s) throws RDFHandlerException { - } - } - -} diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java deleted file mode 100644 index 673d65fed..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java +++ /dev/null @@ -1,175 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
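RdfFileInputFormat's record reader, just above, parses the whole split eagerly into a BlockingQueue and then drains it one statement per nextKeyValue() call; the in-code TODO already notes the memory cost of buffering everything. A smaller nit: getProgress() divides by total, so an input with no statements would report NaN progress. A guarded version (sketch):

    // Sketch: queue-draining progress with an empty-input guard.
    @Override
    public float getProgress() {
        return total == 0 ? 1.0f : (total - queue.size()) / (float) total;
    }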
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV; - -import java.io.IOException; -import java.util.Collection; -import java.util.Date; -import java.util.Map; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.RyaTableMutationsFactory; -import mvm.rya.accumulo.mr.AbstractAccumuloMRTool; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.utils.RyaStatementWritable; -import mvm.rya.api.resolver.RyaTripleContext; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.data.Mutation; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.openrdf.rio.RDFFormat; - -/** - * Do bulk import of rdf files - * Class RdfFileInputTool - * Date: May 16, 2011 - * Time: 3:12:16 PM - */ -public class RdfFileInputTool extends AbstractAccumuloMRTool implements Tool { - - private String format = RDFFormat.RDFXML.getName(); - - public static void main(String[] args) { - try { - ToolRunner.run(new Configuration(), new RdfFileInputTool(), args); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException, AccumuloSecurityException { - conf.set(MRUtils.JOB_NAME_PROP, "Rdf File Input"); - //faster - init(); - format = conf.get(MRUtils.FORMAT_PROP, format); - conf.set(MRUtils.FORMAT_PROP, format); - - String inputPath = conf.get(MRUtils.INPUT_PATH, args[0]); - - Job job = new Job(conf); - job.setJarByClass(RdfFileInputTool.class); - - // set up cloudbase input - job.setInputFormatClass(RdfFileInputFormat.class); - RdfFileInputFormat.addInputPath(job, new Path(inputPath)); - - // set input output of the particular job - job.setMapOutputKeyClass(LongWritable.class); - job.setMapOutputValueClass(RyaStatementWritable.class); - - setupOutputFormat(job, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - - // set mapper and reducer classes - job.setMapperClass(StatementToMutationMapper.class); - job.setNumReduceTasks(0); - - // Submit the job - Date startTime = new Date(); - System.out.println("Job started: " + startTime); - int exitCode = job.waitForCompletion(true) ? 
0 : 1; - - if (exitCode == 0) { - Date end_time = new Date(); - System.out.println("Job ended: " + end_time); - System.out.println("The job took " - + (end_time.getTime() - startTime.getTime()) / 1000 - + " seconds."); - return job - .getCounters() - .findCounter("org.apache.hadoop.mapred.Task$Counter", - "REDUCE_OUTPUT_RECORDS").getValue(); - } else { - System.out.println("Job Failed!!!"); - } - - return -1; - } - - @Override - public int run(String[] args) throws Exception { - runJob(args); - return 0; - } - - public static class StatementToMutationMapper extends Mapper { - protected String tablePrefix; - protected Text spo_table; - protected Text po_table; - protected Text osp_table; - private byte[] cv = EMPTY_CV.getExpression(); - RyaTableMutationsFactory mut; - - public StatementToMutationMapper() { - } - - @Override - protected void setup(Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - mut = new RyaTableMutationsFactory(RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf))); - tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF); - spo_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - po_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - osp_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - - final String cv_s = conf.get(MRUtils.AC_CV_PROP); - if (cv_s != null) - cv = cv_s.getBytes(); - } - - @Override - protected void map(LongWritable key, RyaStatementWritable value, Context context) throws IOException, InterruptedException { - RyaStatement statement = value.getRyaStatement(); - if (statement.getColumnVisibility() == null) { - statement.setColumnVisibility(cv); - } - Map> mutationMap = - mut.serialize(statement); - Collection spo = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO); - Collection po = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO); - Collection osp = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP); - - for (Mutation m : spo) { - context.write(spo_table, m); - } - for (Mutation m : po) { - context.write(po_table, m); - } - for (Mutation m : osp) { - context.write(osp_table, m); - } - } - - } -} - diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java deleted file mode 100644 index 89f0aa5cd..000000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java +++ /dev/null @@ -1,240 +0,0 @@ -package mvm.rya.accumulo.mr.upgrade; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
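Both file-input tools above run map-only jobs (setNumReduceTasks(0)) yet report the legacy REDUCE_OUTPUT_RECORDS counter afterwards; with no reduce phase that counter stays at zero, so runJob() appears to return 0 on success regardless of how many records were written. The map-side counter reflects the real count (a sketch, using the newer TaskCounter enum rather than the old string-named group):

    // Sketch: counting records emitted by a map-only job.
    long written = job.getCounters()
            .findCounter(org.apache.hadoop.mapreduce.TaskCounter.MAP_OUTPUT_RECORDS)
            .getValue();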
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
deleted file mode 100644
index 89f0aa5cd..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
+++ /dev/null
@@ -1,240 +0,0 @@
-package mvm.rya.accumulo.mr.upgrade;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import mvm.rya.accumulo.mr.AbstractAccumuloMRTool;
-import mvm.rya.accumulo.mr.utils.MRUtils;
-import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.iterators.user.RegExFilter;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.calrissian.mango.types.LexiTypeEncoders;
-import org.calrissian.mango.types.TypeEncoder;
-
-import java.io.IOException;
-import java.util.Date;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-
-/**
- */
-public class Upgrade322Tool extends AbstractAccumuloMRTool implements Tool {
-    @Override
-    public int run(String[] strings) throws Exception {
-        conf.set(MRUtils.JOB_NAME_PROP, "Upgrade to Rya 3.2.2");
-        //faster
-        init();
-
-        Job job = new Job(conf);
-        job.setJarByClass(Upgrade322Tool.class);
-
-        setupInputFormat(job);
-        AccumuloInputFormat.setInputTableName(job, tablePrefix + TBL_OSP_SUFFIX);
-
-        //we do not need to change any row that is a string, custom, or uri type
-        IteratorSetting regex = new IteratorSetting(30, "regex",
-                RegExFilter.class);
-        RegExFilter.setRegexs(regex, "\\w*" + TYPE_DELIM + "[\u0003|\u0008|\u0002]", null, null, null, false);
-        RegExFilter.setNegate(regex, true);
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Mutation.class);
-
-        setupOutputFormat(job, tablePrefix +
-                TBL_SPO_SUFFIX);
-
-        // set mapper and reducer classes
-        job.setMapperClass(Upgrade322Mapper.class);
-        job.setReducerClass(Reducer.class);
-
-        // Submit the job
-        return job.waitForCompletion(true) ? 0 : 1;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new Upgrade322Tool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    /**
-     * Reading from the OSP table
-     */
-    public static class Upgrade322Mapper extends Mapper<Key, Value, Text, Mutation> {
-
-        private String tablePrefix;
-        private Text spoTable;
-        private Text poTable;
-        private Text ospTable;
-
-        private final UpgradeObjectSerialization upgradeObjectSerialization;
-
-        public Upgrade322Mapper() {
-            this(new UpgradeObjectSerialization());
-        }
-
-        public Upgrade322Mapper(
-                UpgradeObjectSerialization upgradeObjectSerialization) {
-            this.upgradeObjectSerialization = upgradeObjectSerialization;
-        }
-
-        @Override
-        protected void setup(
-                Context context) throws IOException, InterruptedException {
-            super.setup(context);
-
-            tablePrefix = context.getConfiguration().get(
-                    MRUtils.TABLE_PREFIX_PROPERTY, TBL_PRFX_DEF);
-            spoTable = new Text(tablePrefix + TBL_SPO_SUFFIX);
-            poTable = new Text(tablePrefix + TBL_PO_SUFFIX);
-            ospTable = new Text(tablePrefix + TBL_OSP_SUFFIX);
-        }
-
-        @Override
-        protected void map(
-                Key key, Value value, Context context)
-                throws IOException, InterruptedException {
-
-            //read the key, expect OSP
-            final String row = key.getRow().toString();
-            final int firstDelim = row.indexOf(DELIM);
-            final int secondDelim = row.indexOf(DELIM, firstDelim + 1);
-            final int typeDelim = row.lastIndexOf(TYPE_DELIM);
-            final String oldSerialization = row.substring(0, firstDelim);
-            char typeMarker = row.charAt(row.length() - 1);
-
-            final String subject = row.substring(firstDelim + 1, secondDelim);
-            final String predicate = row.substring(secondDelim + 1, typeDelim);
-            final String typeSuffix = TYPE_DELIM + typeMarker;
-
-            String newSerialization = upgradeObjectSerialization.upgrade(oldSerialization, typeMarker);
-            if(newSerialization == null) {
-                return;
-            }
-
-            //write out delete Mutations
-            Mutation deleteOldSerialization_osp = new Mutation(key.getRow());
-            deleteOldSerialization_osp.putDelete(key.getColumnFamily(), key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed());
-            Mutation deleteOldSerialization_po = new Mutation(predicate + DELIM + oldSerialization + DELIM + subject + typeSuffix);
-            deleteOldSerialization_po.putDelete(key.getColumnFamily(),
-                    key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed());
-            Mutation deleteOldSerialization_spo = new Mutation(subject + DELIM + predicate + DELIM + oldSerialization + typeSuffix);
-            deleteOldSerialization_spo.putDelete(key.getColumnFamily(), key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed());
-
-            //write out new serialization
-            Mutation putNewSerialization_osp = new Mutation(newSerialization + DELIM + subject + DELIM + predicate + typeSuffix);
-            putNewSerialization_osp.put(key.getColumnFamily(),
-                    key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed(),
-                    key.getTimestamp(), value);
-            Mutation putNewSerialization_po = new Mutation(predicate + DELIM + newSerialization + DELIM + subject + typeSuffix);
-            putNewSerialization_po.put(key.getColumnFamily(),
-                    key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed(),
-                    key.getTimestamp(), value);
-            Mutation putNewSerialization_spo = new Mutation(subject + DELIM + predicate + DELIM + newSerialization + typeSuffix);
-            putNewSerialization_spo.put(key.getColumnFamily(),
-                    key.getColumnQualifier(),
-                    key.getColumnVisibilityParsed(),
-                    key.getTimestamp(), value);
-
-            //write out deletes to all tables
-            context.write(ospTable, deleteOldSerialization_osp);
-            context.write(poTable, deleteOldSerialization_po);
-            context.write(spoTable, deleteOldSerialization_spo);
-
-            //write out inserts to all tables
-            context.write(ospTable, putNewSerialization_osp);
-            context.write(poTable, putNewSerialization_po);
-            context.write(spoTable, putNewSerialization_spo);
-        }
-    }
-
-    public static class UpgradeObjectSerialization {
-
-        public static final TypeEncoder<Boolean, String>
-                BOOLEAN_STRING_TYPE_ENCODER = LexiTypeEncoders.booleanEncoder();
-        public static final TypeEncoder<Byte, String> BYTE_STRING_TYPE_ENCODER
-                = LexiTypeEncoders.byteEncoder();
-        public static final TypeEncoder<Date, String> DATE_STRING_TYPE_ENCODER
-                = LexiTypeEncoders.dateEncoder();
-        public static final TypeEncoder<Integer, String>
-                INTEGER_STRING_TYPE_ENCODER = LexiTypeEncoders.integerEncoder();
-        public static final TypeEncoder<Long, String> LONG_STRING_TYPE_ENCODER
-                = LexiTypeEncoders.longEncoder();
-        public static final TypeEncoder<Double, String>
-                DOUBLE_STRING_TYPE_ENCODER = LexiTypeEncoders.doubleEncoder();
-
-        public String upgrade(String object, int typeMarker) {
-            switch(typeMarker) {
-                case 10: //boolean
-                    final boolean bool = Boolean.parseBoolean(object);
-                    return BOOLEAN_STRING_TYPE_ENCODER.encode(bool);
-                case 9: //byte
-                    final byte b = Byte.parseByte(object);
-                    return BYTE_STRING_TYPE_ENCODER.encode(b);
-                case 4: //long
-                    final Long lng = Long.parseLong(object);
-                    return LONG_STRING_TYPE_ENCODER.encode(lng);
-                case 5: //int
-                    final Integer i = Integer.parseInt(object);
-                    return INTEGER_STRING_TYPE_ENCODER.encode(i);
-                case 6: //double
-                    String exp = object.substring(2, 5);
-                    char valueSign = object.charAt(0);
-                    char expSign = object.charAt(1);
-                    Integer expInt = Integer.parseInt(exp);
-                    if (expSign == '-') {
-                        expInt = 999 - expInt;
-                    }
-                    final String expDoubleStr =
-                            String.format("%s%sE%s%d", valueSign,
-                                    object.substring(6),
-                                    expSign, expInt);
-                    return DOUBLE_STRING_TYPE_ENCODER
-                            .encode(Double.parseDouble(expDoubleStr));
-                case 7: //datetime
-                    //check to see if it is an early release that includes the exact term xsd:dateTime
-                    final Long l = Long.MAX_VALUE - Long.parseLong(object);
-                    Date date = new Date(l);
-                    return DATE_STRING_TYPE_ENCODER.encode(date);
-                default:
-                    return null;
-            }
-        }
-    }
-}
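Editor's note: the mapper above only rewrites rows whose trailing type marker denotes a boolean/numeric/date encoding (boolean 10, byte 9, long 4, int 5, double 6, datetime 7); string, URI, and custom types (markers \u0003, \u0008, \u0002) are excluded server-side by the negated RegExFilter. A sketch of the per-value hook, assuming an object string the pre-3.2.2 code serialized as a plain decimal (which is what the Long.parseLong above implies):

    Upgrade322Tool.UpgradeObjectSerialization upgrade =
            new Upgrade322Tool.UpgradeObjectSerialization();
    // marker 4 = long; returns Mango's lexicoded form, which sorts correctly as bytes
    String reencoded = upgrade.upgrade("1364356800000", 4);
    // markers outside the switch return null, and the mapper skips the row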
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
deleted file mode 100644
index c9dac6bff..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
+++ /dev/null
@@ -1,206 +0,0 @@
-package mvm.rya.accumulo.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.admin.TableOperations;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.Property;
-import org.apache.accumulo.core.data.ByteSequence;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.file.FileSKVIterator;
-import org.apache.accumulo.core.file.rfile.RFileOperations;
-import org.apache.accumulo.core.util.ArgumentChecker;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-
-/**
- * Finds the accumulo tablet files on the hdfs disk, and uses that as the input for MR jobs
- * Date: 5/11/12
- * Time: 2:04 PM
- */
-public class AccumuloHDFSFileInputFormat extends FileInputFormat<Key, Value> {
-
-    public static final Range ALLRANGE = new Range(new Text("\u0000"), new Text("\uFFFD"));
-
-    @Override
-    public List<InputSplit> getSplits(JobContext jobContext) throws IOException {
-        //read the params from AccumuloInputFormat
-        Configuration conf = jobContext.getConfiguration();
-        Instance instance = AccumuloProps.getInstance(jobContext);
-        String user = AccumuloProps.getUsername(jobContext);
-        AuthenticationToken password = AccumuloProps.getPassword(jobContext);
-        String table = AccumuloProps.getTablename(jobContext);
-        ArgumentChecker.notNull(instance);
-        ArgumentChecker.notNull(table);
-
-        //find the files necessary
-        try {
-            AccumuloConfiguration acconf = instance.getConfiguration();
-            FileSystem fs = FileSystem.get(conf);
-            Connector connector = instance.getConnector(user, password);
-            TableOperations tos = connector.tableOperations();
-            String tableId = tos.tableIdMap().get(table);
-            String filePrefix = acconf.get(Property.INSTANCE_DFS_DIR) + "/tables/" + tableId;
-            System.out.println(filePrefix);
-
-            Scanner scanner = connector.createScanner("!METADATA", Constants.NO_AUTHS); //TODO: auths?
-            scanner.setRange(new Range(new Text(tableId + "\u0000"), new Text(tableId + "\uFFFD")));
-            scanner.fetchColumnFamily(new Text("file"));
-            List<String> files = new ArrayList<String>();
-            List<InputSplit> fileSplits = new ArrayList<InputSplit>();
-            Job job = new Job(conf);
-            for (Map.Entry<Key, Value> entry : scanner) {
-                String file = filePrefix + entry.getKey().getColumnQualifier().toString();
-                files.add(file);
-                Path path = new Path(file);
-                FileStatus fileStatus = fs.getFileStatus(path);
-                long len = fileStatus.getLen();
-                BlockLocation[] fileBlockLocations = fs.getFileBlockLocations(fileStatus, 0, len);
-                fileSplits.add(new FileSplit(path, 0, len, fileBlockLocations[0].getHosts()));
-//                FileInputFormat.addInputPath(job, path);
-            }
-            System.out.println(files);
-            return fileSplits;
-//            return super.getSplits(job);
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-    }
-
-    @Override
-    public RecordReader<Key, Value> createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-        return new RecordReader<Key, Value>() {
-
-            private FileSKVIterator fileSKVIterator;
-
-            @Override
-            public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-                FileSplit split = (FileSplit) inputSplit;
-                Configuration job = taskAttemptContext.getConfiguration();
-                Path file = split.getPath();
-//                long start = split.getStart();
-//                long length = split.getLength();
-                FileSystem fs = file.getFileSystem(job);
-//                FSDataInputStream fileIn = fs.open(file);
-//                System.out.println(start);
-//                if (start != 0L) {
-//                    fileIn.seek(start);
-//                }
-                Instance instance = AccumuloProps.getInstance(taskAttemptContext);
-
-                fileSKVIterator = RFileOperations.getInstance().openReader(file.toString(), ALLRANGE,
-                        new HashSet<ByteSequence>(), false, fs, job, instance.getConfiguration());
-//                fileSKVIterator = new RFileOperations2().openReader(fileIn, length - start, job);
-            }
-
-            @Override
-            public boolean nextKeyValue() throws IOException, InterruptedException {
-                fileSKVIterator.next();
-                return fileSKVIterator.hasTop();
-            }
-
-            @Override
-            public Key getCurrentKey() throws IOException, InterruptedException {
-                return fileSKVIterator.getTopKey();
-            }
-
-            @Override
-            public Value getCurrentValue() throws IOException, InterruptedException {
-                return fileSKVIterator.getTopValue();
-            }
-
-            @Override
-            public float getProgress() throws IOException, InterruptedException {
-                return 0;
-            }
-
-            @Override
-            public void close() throws IOException {
-            }
-        };
-    }
-
-    public static void main(String[] args) {
-        try {
-            Job job = new Job(new Configuration());
-            job.setJarByClass(AccumuloHDFSFileInputFormat.class);
-            Configuration conf = job.getConfiguration();
-            conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-            conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-            AccumuloInputFormat.setConnectorInfo(job, "root", new PasswordToken("secret"));
-            AccumuloInputFormat.setInputTableName(job, "l_spo");
-            AccumuloInputFormat.setScanAuthorizations(job, Constants.NO_AUTHS);
-            AccumuloInputFormat.setZooKeeperInstance(job, "acu13", "stratus25:2181");
-            AccumuloInputFormat.setRanges(job, Collections.singleton(ALLRANGE));
-            job.setMapperClass(NullMapper.class);
-            job.setNumReduceTasks(0);
-            job.setOutputFormatClass(NullOutputFormat.class);
-            if (args.length == 0) {
-                job.setInputFormatClass(AccumuloHDFSFileInputFormat.class);
-            } else {
-                job.setInputFormatClass(AccumuloInputFormat.class);
-            }
-            job.waitForCompletion(true);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    @SuppressWarnings("rawtypes")
-    public static class NullMapper extends Mapper {
-        @Override
-        protected void map(Object key, Object value, Context context) throws IOException, InterruptedException {
-
-        }
-    }
-}
-
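Editor's note: the input format above bypassed the tablet servers: it scanned the !METADATA table for a table's RFile paths and fed the raw files to mappers, trading consistency (in-memory entries and files from fresh compactions can be missed) for scan throughput. Its own main() shows the opt-in; the credentials and table names below are the example values from that main(), not real ones:

    Job job = new Job(new Configuration());
    AccumuloInputFormat.setConnectorInfo(job, "root", new PasswordToken("secret"));
    AccumuloInputFormat.setInputTableName(job, "l_spo");
    AccumuloInputFormat.setZooKeeperInstance(job, "acu13", "stratus25:2181");
    // the same job configuration is read back through AccumuloProps in getSplits()
    job.setInputFormatClass(AccumuloHDFSFileInputFormat.class); // read RFiles directly from HDFS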
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
deleted file mode 100644
index 2b894404b..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package mvm.rya.accumulo.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.io.IOException;
-
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.mapreduce.InputFormatBase;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-@SuppressWarnings("rawtypes")
-public class AccumuloProps extends InputFormatBase {
-
-    @Override
-    public RecordReader createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-        throw new UnsupportedOperationException("Accumulo Props just holds properties");
-    }
-
-    public static Instance getInstance(JobContext conf) {
-        return InputFormatBase.getInstance(conf);
-    }
-
-    public static AuthenticationToken getPassword(JobContext conf) {
-        return InputFormatBase.getAuthenticationToken(conf);
-    }
-
-    public static String getUsername(JobContext conf) {
-        return InputFormatBase.getPrincipal(conf);
-    }
-
-    public static String getTablename(JobContext conf) {
-        return InputFormatBase.getInputTableName(conf);
-    }
-}
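Editor's note: AccumuloProps is not a usable InputFormat; it extends InputFormatBase only to re-expose that class's protected static configuration getters, which is how AccumuloHDFSFileInputFormat reads its connection settings:

    Instance instance = AccumuloProps.getInstance(jobContext);  // InputFormatBase.getInstance
    String user = AccumuloProps.getUsername(jobContext);        // InputFormatBase.getPrincipal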
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
deleted file mode 100644
index c3003d33c..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package mvm.rya.accumulo.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-/**
- * Class MRUtils
- * Date: May 19, 2011
- * Time: 10:34:06 AM
- */
-public class MRUtils {
-
-    public static final String JOB_NAME_PROP = "mapred.job.name";
-
-    public static final String AC_USERNAME_PROP = "ac.username";
-    public static final String AC_PWD_PROP = "ac.pwd";
-    public static final String AC_ZK_PROP = "ac.zk";
-    public static final String AC_INSTANCE_PROP = "ac.instance";
-    public static final String AC_TTL_PROP = "ac.ttl";
-    public static final String AC_TABLE_PROP = "ac.table";
-    public static final String AC_AUTH_PROP = "ac.auth";
-    public static final String AC_CV_PROP = "ac.cv";
-    public static final String AC_MOCK_PROP = "ac.mock";
-    public static final String AC_HDFS_INPUT_PROP = "ac.hdfsinput";
-    // note: repeats the "ac.hdfsinput" key above; the constant's name suggests it was meant to hold Hadoop's "io.sort.mb"
-    public static final String HADOOP_IO_SORT_MB = "ac.hdfsinput";
-    public static final String TABLE_LAYOUT_PROP = "rdf.tablelayout";
-    public static final String FORMAT_PROP = "rdf.format";
-    public static final String INPUT_PATH = "input";
-
-    public static final String NAMED_GRAPH_PROP = "rdf.graph";
-
-    public static final String TABLE_PREFIX_PROPERTY = "rdf.tablePrefix";
-
-    // rdf constants
-    public static final ValueFactory vf = new ValueFactoryImpl();
-    public static final URI RDF_TYPE = vf.createURI("http://www.w3.org/1999/02/22-rdf-syntax-ns#", "type");
-
-
-    // cloudbase map reduce utils
-
-//    public static Range retrieveRange(URI entry_key, URI entry_val) throws IOException {
-//        ByteArrayDataOutput startRowOut = ByteStreams.newDataOutput();
-//        startRowOut.write(RdfCloudTripleStoreUtils.writeValue(entry_key));
-//        if (entry_val != null) {
-//            startRowOut.write(RdfCloudTripleStoreConstants.DELIM_BYTES);
-//            startRowOut.write(RdfCloudTripleStoreUtils.writeValue(entry_val));
-//        }
-//        byte[] startrow = startRowOut.toByteArray();
-//        startRowOut.write(RdfCloudTripleStoreConstants.DELIM_STOP_BYTES);
-//        byte[] stoprow = startRowOut.toByteArray();
-//
-//        Range range = new Range(new Text(startrow), new Text(stoprow));
-//        return range;
-//    }
-
-
-    public static String getACTtl(Configuration conf) {
-        return conf.get(AC_TTL_PROP);
-    }
-
-    public static String getACUserName(Configuration conf) {
-        return conf.get(AC_USERNAME_PROP);
-    }
-
-    public static String getACPwd(Configuration conf) {
-        return conf.get(AC_PWD_PROP);
-    }
-
-    public static String getACZK(Configuration conf) {
-        return conf.get(AC_ZK_PROP);
-    }
-
-    public static String getACInstance(Configuration conf) {
-        return conf.get(AC_INSTANCE_PROP);
-    }
-
-    public static void setACUserName(Configuration conf, String str) {
-        conf.set(AC_USERNAME_PROP, str);
-    }
-
-    public static void setACPwd(Configuration conf, String str) {
-        conf.set(AC_PWD_PROP, str);
-    }
-
-    public static void setACZK(Configuration conf, String str) {
-        conf.set(AC_ZK_PROP, str);
-    }
-
-    public static void setACInstance(Configuration conf, String str) {
-        conf.set(AC_INSTANCE_PROP, str);
-    }
-
-    public static void setACTtl(Configuration conf, String str) {
-        conf.set(AC_TTL_PROP, str);
-    }
-}
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
deleted file mode 100644
index 1d0d9c9e1..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
+++ /dev/null
@@ -1,402 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import static mvm.rya.api.RdfCloudTripleStoreUtils.layoutToTable;
-import info.aduna.iteration.CloseableIteration;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Map;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaRange;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.BatchRyaQuery;
-import mvm.rya.api.persist.query.RyaQuery;
-import mvm.rya.api.persist.query.RyaQueryEngine;
-import mvm.rya.api.query.strategy.ByteRange;
-import mvm.rya.api.query.strategy.TriplePatternStrategy;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRowRegex;
-import mvm.rya.api.utils.CloseableIterableIteration;
-
-import org.apache.accumulo.core.client.BatchScanner;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.ScannerBase;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.iterators.user.RegExFilter;
-import org.apache.accumulo.core.iterators.user.TimestampFilter;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.hadoop.io.Text;
-import org.calrissian.mango.collect.CloseableIterable;
-import org.calrissian.mango.collect.CloseableIterables;
-import org.calrissian.mango.collect.FluentCloseableIterable;
-import org.openrdf.query.BindingSet;
-
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Iterators;
-
-/**
- * Date: 7/17/12
- * Time: 9:28 AM
- */
-public class AccumuloRyaQueryEngine implements RyaQueryEngine<AccumuloRdfConfiguration> {
-
-    private AccumuloRdfConfiguration configuration;
-    private Connector connector;
-    private RyaTripleContext ryaContext;
-    private final Map<TABLE_LAYOUT, KeyValueToRyaStatementFunction> keyValueToRyaStatementFunctionMap = new HashMap<TABLE_LAYOUT, KeyValueToRyaStatementFunction>();
-
-    public AccumuloRyaQueryEngine(Connector connector) {
-        this(connector, new AccumuloRdfConfiguration());
-    }
-
-    public AccumuloRyaQueryEngine(Connector connector, AccumuloRdfConfiguration conf) {
-        this.connector = connector;
-        this.configuration = conf;
-        ryaContext = RyaTripleContext.getInstance(conf);
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.SPO, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.SPO, ryaContext));
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.PO, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.PO, ryaContext));
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.OSP, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.OSP, ryaContext));
-    }
-
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> query(RyaStatement stmt, AccumuloRdfConfiguration conf) throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-
-        RyaQuery ryaQuery = RyaQuery.builder(stmt).load(conf).build();
-        CloseableIterable<RyaStatement> results = query(ryaQuery);
-
-        return new CloseableIterableIteration<RyaStatement, RyaDAOException>(results);
-    }
-
-    protected String getData(RyaType ryaType) {
-        return (ryaType != null) ? (ryaType.getData()) : (null);
-    }
-
-    @Override
-    public CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> queryWithBindingSet(Collection<Map.Entry<RyaStatement, BindingSet>> stmts, AccumuloRdfConfiguration conf) throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-        //query configuration
-        Authorizations authorizations = conf.getAuthorizations();
-        Long ttl = conf.getTtl();
-        Long maxResults = conf.getLimit();
-        Integer maxRanges = conf.getMaxRangesForScanner();
-        Integer numThreads = conf.getNumThreads();
-
-        //TODO: cannot span multiple tables here
-        try {
-            Collection<Range> ranges = new HashSet<Range>();
-            RangeBindingSetEntries rangeMap = new RangeBindingSetEntries();
-            TABLE_LAYOUT layout = null;
-            RyaURI context = null;
-            TriplePatternStrategy strategy = null;
-            for (Map.Entry<RyaStatement, BindingSet> stmtbs : stmts) {
-                RyaStatement stmt = stmtbs.getKey();
-                context = stmt.getContext(); //TODO: This will be overwritten
-                BindingSet bs = stmtbs.getValue();
-                strategy = ryaContext.retrieveStrategy(stmt);
-                if (strategy == null) {
-                    throw new IllegalArgumentException("TriplePattern[" + stmt + "] not supported");
-                }
-
-                Map.Entry<TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(stmt.getSubject(), stmt.getPredicate(), stmt.getObject(), stmt.getContext(), conf);
-
-                //use range to set scanner
-                //populate scanner based on authorizations, ttl
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                Range range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-                ranges.add(range);
-                rangeMap.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(range, bs));
-            }
-            //no ranges
-            if (layout == null) return null;
-            String regexSubject = conf.getRegexSubject();
-            String regexPredicate = conf.getRegexPredicate();
-            String regexObject = conf.getRegexObject();
-            TripleRowRegex tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, null);
-
-            String table = layoutToTable(layout, conf);
-            boolean useBatchScanner = ranges.size() > maxRanges;
-            RyaStatementBindingSetKeyValueIterator iterator = null;
-            if (useBatchScanner) {
-                ScannerBase scanner = connector.createBatchScanner(table, authorizations, numThreads);
-                ((BatchScanner) scanner).setRanges(ranges);
-                fillScanner(scanner, context, null, ttl, null, tripleRowRegex, conf);
-                iterator = new RyaStatementBindingSetKeyValueIterator(layout, ryaContext, scanner, rangeMap);
-            } else {
-                Scanner scannerBase = null;
-                Iterator<Map.Entry<Key, Value>>[] iters = new Iterator[ranges.size()];
-                int i = 0;
-                for (Range range : ranges) {
-                    scannerBase = connector.createScanner(table, authorizations);
-                    scannerBase.setRange(range);
-                    fillScanner(scannerBase, context, null, ttl, null, tripleRowRegex, conf);
-                    iters[i] = scannerBase.iterator();
-                    i++;
-                }
-                iterator = new RyaStatementBindingSetKeyValueIterator(layout, Iterators.concat(iters), rangeMap, ryaContext);
-            }
-            if (maxResults != null) {
-                iterator.setMaxResults(maxResults);
-            }
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-
-    }
-
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> batchQuery(Collection<RyaStatement> stmts, AccumuloRdfConfiguration conf)
-            throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-
-        BatchRyaQuery batchRyaQuery = BatchRyaQuery.builder(stmts).load(conf).build();
-        CloseableIterable<RyaStatement> results = query(batchRyaQuery);
-
-        return new CloseableIterableIteration<RyaStatement, RyaDAOException>(results);
-    }
-
-    @Override
-    public CloseableIterable<RyaStatement> query(RyaQuery ryaQuery) throws RyaDAOException {
-        Preconditions.checkNotNull(ryaQuery);
-        RyaStatement stmt = ryaQuery.getQuery();
-        Preconditions.checkNotNull(stmt);
-
-        //query configuration
-        String[] auths = ryaQuery.getAuths();
-        Authorizations authorizations = auths != null ? new Authorizations(auths) : configuration.getAuthorizations();
-        Long ttl = ryaQuery.getTtl();
-        Long currentTime = ryaQuery.getCurrentTime();
-        Long maxResults = ryaQuery.getMaxResults();
-        Integer batchSize = ryaQuery.getBatchSize();
-        String regexSubject = ryaQuery.getRegexSubject();
-        String regexPredicate = ryaQuery.getRegexPredicate();
-        String regexObject = ryaQuery.getRegexObject();
-        TableLayoutStrategy tableLayoutStrategy = configuration.getTableLayoutStrategy();
-
-        try {
-            //find triple pattern range
-            TriplePatternStrategy strategy = ryaContext.retrieveStrategy(stmt);
-            TABLE_LAYOUT layout;
-            Range range;
-            RyaURI subject = stmt.getSubject();
-            RyaURI predicate = stmt.getPredicate();
-            RyaType object = stmt.getObject();
-            RyaURI context = stmt.getContext();
-            String qualifier = stmt.getQualifer();
-            TripleRowRegex tripleRowRegex = null;
-            if (strategy != null) {
-                //otherwise, full table scan is supported
-                Map.Entry<TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(subject, predicate, object, context, null);
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-
-                byte[] objectTypeInfo = null;
-                if (object != null) {
-                    //TODO: Not good to serialize this twice
-                    if (object instanceof RyaRange) {
-                        objectTypeInfo = RyaContext.getInstance().serializeType(((RyaRange) object).getStart())[1];
-                    } else {
-                        objectTypeInfo = RyaContext.getInstance().serializeType(object)[1];
-                    }
-                }
-
-                tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, objectTypeInfo);
-            } else {
-                range = new Range();
-                layout = TABLE_LAYOUT.SPO;
-            }
-
-            //use range to set scanner
-            //populate scanner based on authorizations, ttl
-            String table = layoutToTable(layout, tableLayoutStrategy);
-            Scanner scanner = connector.createScanner(table, authorizations);
-            scanner.setRange(range);
-            if (batchSize != null) {
-                scanner.setBatchSize(batchSize);
-            }
-            fillScanner(scanner, context, qualifier, ttl, currentTime, tripleRowRegex, ryaQuery.getConf());
-
-            FluentCloseableIterable<RyaStatement> results = FluentCloseableIterable.from(new ScannerBaseCloseableIterable(scanner))
-                    .transform(keyValueToRyaStatementFunctionMap.get(layout));
-            if (maxResults != null) {
-                results = results.limit(maxResults.intValue());
-            }
-
-            return results;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public CloseableIterable<RyaStatement> query(BatchRyaQuery ryaQuery) throws RyaDAOException {
-        Preconditions.checkNotNull(ryaQuery);
-        Iterable<RyaStatement> stmts = ryaQuery.getQueries();
-        Preconditions.checkNotNull(stmts);
-
-        //query configuration
-        String[] auths = ryaQuery.getAuths();
-        final Authorizations authorizations = auths != null ? new Authorizations(auths) : configuration.getAuthorizations();
-        final Long ttl = ryaQuery.getTtl();
-        Long currentTime = ryaQuery.getCurrentTime();
-        Long maxResults = ryaQuery.getMaxResults();
-        Integer batchSize = ryaQuery.getBatchSize();
-        Integer numQueryThreads = ryaQuery.getNumQueryThreads();
-        String regexSubject = ryaQuery.getRegexSubject();
-        String regexPredicate = ryaQuery.getRegexPredicate();
-        String regexObject = ryaQuery.getRegexObject();
-        TableLayoutStrategy tableLayoutStrategy = configuration.getTableLayoutStrategy();
-        int maxRanges = ryaQuery.getMaxRanges();
-
-        //TODO: cannot span multiple tables here
-        try {
-            Collection<Range> ranges = new HashSet<Range>();
-            TABLE_LAYOUT layout = null;
-            RyaURI context = null;
-            TriplePatternStrategy strategy = null;
-            for (RyaStatement stmt : stmts) {
-                context = stmt.getContext(); //TODO: This will be overwritten
-                strategy = ryaContext.retrieveStrategy(stmt);
-                if (strategy == null) {
-                    throw new IllegalArgumentException("TriplePattern[" + stmt + "] not supported");
-                }
-
-                Map.Entry<TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(stmt.getSubject(), stmt.getPredicate(), stmt.getObject(), stmt.getContext(), null);
-
-                //use range to set scanner
-                //populate scanner based on authorizations, ttl
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                Range range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-                ranges.add(range);
-            }
-            //no ranges
-            if (layout == null) throw new IllegalArgumentException("No table layout specified");
-
-            final TripleRowRegex tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, null);
-
-            final String table = layoutToTable(layout, tableLayoutStrategy);
-            boolean useBatchScanner = ranges.size() > maxRanges;
-            FluentCloseableIterable<RyaStatement> results = null;
-            if (useBatchScanner) {
-                BatchScanner scanner = connector.createBatchScanner(table, authorizations, numQueryThreads);
-                scanner.setRanges(ranges);
-                fillScanner(scanner, context, null, ttl, null, tripleRowRegex, ryaQuery.getConf());
-                results = FluentCloseableIterable.from(new ScannerBaseCloseableIterable(scanner)).transform(keyValueToRyaStatementFunctionMap.get(layout));
-            } else {
-                final RyaURI fcontext = context;
-                final RdfCloudTripleStoreConfiguration fconf = ryaQuery.getConf();
-                FluentIterable<RyaStatement> fluent = FluentIterable.from(ranges).transformAndConcat(new Function<Range, Iterable<Map.Entry<Key, Value>>>() {
-                    @Override
-                    public Iterable<Map.Entry<Key, Value>> apply(Range range) {
-                        try {
-                            Scanner scanner = connector.createScanner(table, authorizations);
-                            scanner.setRange(range);
-                            fillScanner(scanner, fcontext, null, ttl, null, tripleRowRegex, fconf);
-                            return scanner;
-                        } catch (Exception e) {
-                            throw new RuntimeException(e);
-                        }
-                    }
-                }).transform(keyValueToRyaStatementFunctionMap.get(layout));
-
-                results = FluentCloseableIterable.from(CloseableIterables.wrap(fluent));
-            }
-            if (maxResults != null) {
-                results = results.limit(maxResults.intValue());
-            }
-            return results;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    protected void fillScanner(ScannerBase scanner, RyaURI context, String qualifier, Long ttl, Long currentTime, TripleRowRegex tripleRowRegex, RdfCloudTripleStoreConfiguration conf) throws IOException {
-        if (context != null && qualifier != null) {
-            scanner.fetchColumn(new Text(context.getData()), new Text(qualifier));
-        } else if (context != null) {
-            scanner.fetchColumnFamily(new Text(context.getData()));
-        } else if (qualifier != null) {
-            IteratorSetting setting = new IteratorSetting(8, "riq", RegExFilter.class.getName());
-            RegExFilter.setRegexs(setting, null, null, qualifier, null, false);
-            scanner.addScanIterator(setting);
-        }
-        if (ttl != null) {
-            IteratorSetting setting = new IteratorSetting(9, "fi", TimestampFilter.class.getName());
-            TimestampFilter.setStart(setting, System.currentTimeMillis() - ttl, true);
-            if(currentTime != null){
-                TimestampFilter.setStart(setting, currentTime - ttl, true);
-                TimestampFilter.setEnd(setting, currentTime, true);
-            }
-            scanner.addScanIterator(setting);
-        }
-        if (tripleRowRegex != null) {
-            IteratorSetting setting = new IteratorSetting(11, "ri", RegExFilter.class.getName());
-            String regex = tripleRowRegex.getRow();
-            RegExFilter.setRegexs(setting, regex, null, null, null, false);
-            scanner.addScanIterator(setting);
-        }
-    }
-
-    @Override
-    public void setConf(AccumuloRdfConfiguration conf) {
-        this.configuration = conf;
-    }
-
-    @Override
-    public AccumuloRdfConfiguration getConf() {
-        return configuration;
-    }
-}
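Editor's note: a minimal sketch of the single-pattern query path being deleted above, run against a live Connector; the subject and predicate values are illustrative:

    AccumuloRyaQueryEngine engine =
            new AccumuloRyaQueryEngine(connector, new AccumuloRdfConfiguration());
    RyaStatement pattern = new RyaStatement(
            new RyaURI("urn:test#subj"), new RyaURI("urn:test#pred"), null); // null object = wildcard
    try (CloseableIterable<RyaStatement> results =
                 engine.query(RyaQuery.builder(pattern).build())) {
        for (RyaStatement st : results) {
            System.out.println(st);
        }
    } catch (IOException e) {
        throw new RuntimeException(e); // close() may throw
    }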
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
deleted file mode 100644
index 2813438e2..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.util.Map;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-
-import com.google.common.base.Function;
-
-/**
- * Date: 1/30/13
- * Time: 2:09 PM
- */
-public class KeyValueToRyaStatementFunction implements Function<Map.Entry<Key, Value>, RyaStatement> {
-
-    private TABLE_LAYOUT tableLayout;
-    private RyaTripleContext context;
-
-    public KeyValueToRyaStatementFunction(TABLE_LAYOUT tableLayout, RyaTripleContext context) {
-        this.tableLayout = tableLayout;
-        this.context = context;
-    }
-
-    @Override
-    public RyaStatement apply(Map.Entry<Key, Value> input) {
-        Key key = input.getKey();
-        Value value = input.getValue();
-        RyaStatement statement = null;
-        try {
-            statement = context.deserializeTriple(tableLayout,
-                    new TripleRow(key.getRowData().toArray(),
-                            key.getColumnFamilyData().toArray(),
-                            key.getColumnQualifierData().toArray(),
-                            key.getTimestamp(),
-                            key.getColumnVisibilityData().toArray(),
-                            (value != null) ? value.get() : null
-                    ));
-        } catch (TripleRowResolverException e) {
-            throw new RuntimeException(e);
-        }
-
-        return statement;
-    }
-}
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
deleted file mode 100644
index c59cb87ce..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Range;
-import org.openrdf.query.BindingSet;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * Class RangeBindingSetEntries
- * Date: Feb 23, 2011
- * Time: 10:15:48 AM
- */
-public class RangeBindingSetEntries {
-    public Collection<Map.Entry<Range, BindingSet>> ranges;
-
-    public RangeBindingSetEntries() {
-        this(new ArrayList<Map.Entry<Range, BindingSet>>());
-    }
-
-    public RangeBindingSetEntries(Collection<Map.Entry<Range, BindingSet>> ranges) {
-        this.ranges = ranges;
-    }
-
-    public Collection<BindingSet> containsKey(Key key) {
-        //TODO: need to find a better way to sort these and pull
-        //TODO: maybe fork/join here
-        Collection<BindingSet> bss = new ArrayList<BindingSet>();
-        for (Map.Entry<Range, BindingSet> entry : ranges) {
-            if (entry.getKey().contains(key))
-                bss.add(entry.getValue());
-        }
-        return bss;
-    }
-}
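Editor's note: containsKey above is a linear scan over every queried range for each returned key, as its own TODO concedes; for large multi-range scans a structure sorted on range start would cut the per-key cost. The engine deleted earlier in this patch populates and consults it like this:

    RangeBindingSetEntries rangeMap = new RangeBindingSetEntries();
    rangeMap.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(range, bs));
    Collection<BindingSet> matches = rangeMap.containsKey(key); // all binding sets whose range contains the key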
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
deleted file mode 100644
index b4333bd61..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
+++ /dev/null
@@ -1,154 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.NoSuchElementException;
-
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import org.apache.accumulo.core.client.BatchScanner;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.ScannerBase;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.openrdf.query.BindingSet;
-
-/**
- * Date: 7/17/12
- * Time: 11:48 AM
- */
-public class RyaStatementBindingSetKeyValueIterator implements CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> {
-    private Iterator<Map.Entry<Key, Value>> dataIterator;
-    private TABLE_LAYOUT tableLayout;
-    private Long maxResults = -1L;
-    private ScannerBase scanner;
-    private boolean isBatchScanner;
-    private RangeBindingSetEntries rangeMap;
-    private Iterator<BindingSet> bsIter;
-    private RyaStatement statement;
-    private RyaTripleContext ryaContext;
-
-    public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, RyaTripleContext context, ScannerBase scannerBase, RangeBindingSetEntries rangeMap) {
-        this(tableLayout, ((scannerBase instanceof BatchScanner) ? ((BatchScanner) scannerBase).iterator() : ((Scanner) scannerBase).iterator()), rangeMap, context);
-        this.scanner = scannerBase;
-        isBatchScanner = scanner instanceof BatchScanner;
-    }
-
-    public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, Iterator<Map.Entry<Key, Value>> dataIterator, RangeBindingSetEntries rangeMap, RyaTripleContext ryaContext) {
-        this.tableLayout = tableLayout;
-        this.rangeMap = rangeMap;
-        this.dataIterator = dataIterator;
-        this.ryaContext = ryaContext;
-    }
-
-    @Override
-    public void close() throws RyaDAOException {
-        dataIterator = null;
-        if (scanner != null && isBatchScanner) {
-            ((BatchScanner) scanner).close();
-        }
-    }
-
-    public boolean isClosed() throws RyaDAOException {
-        return dataIterator == null;
-    }
-
-    @Override
-    public boolean hasNext() throws RyaDAOException {
-        if (isClosed()) {
-            return false;
-        }
-        if (maxResults != 0) {
-            if (bsIter != null && bsIter.hasNext()) {
-                return true;
-            }
-            if (dataIterator.hasNext()) {
-                return true;
-            } else {
-                maxResults = 0L;
-                return false;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Map.Entry<RyaStatement, BindingSet> next() throws RyaDAOException {
-        if (!hasNext() || isClosed()) {
-            throw new NoSuchElementException();
-        }
-
-        try {
-            while (true) {
-                if (bsIter != null && bsIter.hasNext()) {
-                    maxResults--;
-                    return new RdfCloudTripleStoreUtils.CustomEntry<RyaStatement, BindingSet>(statement, bsIter.next());
-                }
-
-                if (dataIterator.hasNext()) {
-                    Map.Entry<Key, Value> next = dataIterator.next();
-                    Key key = next.getKey();
-                    statement = ryaContext.deserializeTriple(tableLayout,
-                            new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(),
-                                    key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get()));
-                    if (next.getValue() != null) {
-                        statement.setValue(next.getValue().get());
-                    }
-                    Collection<BindingSet> bindingSets = rangeMap.containsKey(key);
-                    if (!bindingSets.isEmpty()) {
-                        bsIter = bindingSets.iterator();
-                    }
-                } else {
-                    break;
-                }
-            }
-            return null;
-        } catch (TripleRowResolverException e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void remove() throws RyaDAOException {
-        //note: delegates to next(), so this skips an element rather than removing one
-        next();
-    }
-
-    public Long getMaxResults() {
-        return maxResults;
-    }
-
-    public void setMaxResults(Long maxResults) {
-        this.maxResults = maxResults;
-    }
-}
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
deleted file mode 100644
index f4c30810d..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.util.Iterator;
-import java.util.Map;
-
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-
-/**
- * Date: 7/17/12
- * Time: 11:48 AM
- */
-public class RyaStatementKeyValueIterator implements CloseableIteration<RyaStatement, RyaDAOException> {
-    private Iterator<Map.Entry<Key, Value>> dataIterator;
-    private TABLE_LAYOUT tableLayout;
-    private Long maxResults = -1L;
-    private RyaTripleContext context;
-
-    public RyaStatementKeyValueIterator(TABLE_LAYOUT tableLayout, RyaTripleContext context, Iterator<Map.Entry<Key, Value>> dataIterator) {
-        this.tableLayout = tableLayout;
-        this.dataIterator = dataIterator;
-        this.context = context;
-    }
-
-    @Override
-    public void close() throws RyaDAOException {
-        dataIterator = null;
-    }
-
-    public boolean isClosed() throws RyaDAOException {
-        return dataIterator == null;
-    }
-
-    @Override
-    public boolean hasNext() throws RyaDAOException {
-        if (isClosed()) {
-            throw new RyaDAOException("Closed Iterator");
-        }
-        return maxResults != 0 && dataIterator.hasNext();
-    }
-
-    @Override
-    public RyaStatement next() throws RyaDAOException {
-        if (!hasNext()) {
-            return null;
-        }
-
-        try {
-            Map.Entry<Key, Value> next = dataIterator.next();
-            Key key = next.getKey();
-            RyaStatement statement = context.deserializeTriple(tableLayout,
-                    new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(),
-                            key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get()));
-            if (next.getValue() != null) {
-                statement.setValue(next.getValue().get());
-            }
-            maxResults--;
-            return statement;
-        } catch (TripleRowResolverException e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void remove() throws RyaDAOException {
-        //note: delegates to next(), so this skips an element rather than removing one
-        next();
-    }
-
-    public Long getMaxResults() {
-        return maxResults;
-    }
-
-    public void setMaxResults(Long maxResults) {
-        this.maxResults = maxResults;
-    }
-}
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
deleted file mode 100644
index d2dcef936..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package mvm.rya.accumulo.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import com.google.common.base.Preconditions;
-import org.apache.accumulo.core.client.ScannerBase;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.calrissian.mango.collect.AbstractCloseableIterable;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- * Date: 1/30/13
- * Time: 2:15 PM
- */
-public class ScannerBaseCloseableIterable extends AbstractCloseableIterable<Map.Entry<Key, Value>> {
-
-    protected ScannerBase scanner;
-
-    public ScannerBaseCloseableIterable(ScannerBase scanner) {
-        Preconditions.checkNotNull(scanner);
-        this.scanner = scanner;
-    }
-
-    @Override
-    protected void doClose() throws IOException {
-        scanner.close();
-    }
-
-    @Override
-    protected Iterator<Map.Entry<Key, Value>> retrieveIterator() {
-        return scanner.iterator();
-    }
-}
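Editor's note: both iterators above implement remove() by calling next(), which silently consumes an element instead of removing one (flagged in the bodies above). Had these classes survived, the conventional fix is to reject the call outright:

    @Override
    public void remove() throws RyaDAOException {
        throw new UnsupportedOperationException("remove is not supported by this iteration");
    }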
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
deleted file mode 100644
index 97d2f54f7..000000000
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package mvm.rya.accumulo.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.iterators.Filter;
-import org.apache.accumulo.core.iterators.IteratorEnvironment;
-import org.apache.accumulo.core.iterators.OptionDescriber;
-import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * Set the startTime and timeRange. The filter will only keep keyValues that
- * are within the range [startTime - timeRange, startTime].
- */
-public class TimeRangeFilter extends Filter {
-    private long timeRange;
-    private long startTime;
-    public static final String TIME_RANGE_PROP = "timeRange";
-    public static final String START_TIME_PROP = "startTime";
-
-    @Override
-    public boolean accept(Key k, Value v) {
-        long diff = startTime - k.getTimestamp();
-        return !(diff > timeRange || diff < 0);
-    }
-
-    @Override
-    public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-        super.init(source, options, env);
-        if (options == null) {
-            throw new IllegalArgumentException("options must be set for TimeRangeFilter");
-        }
-
-        timeRange = -1;
-        String timeRange_s = options.get(TIME_RANGE_PROP);
-        if (timeRange_s == null)
-            throw new IllegalArgumentException("timeRange must be set for TimeRangeFilter");
-
-        timeRange = Long.parseLong(timeRange_s);
-
-        String time = options.get(START_TIME_PROP);
-        if (time != null)
-            startTime = Long.parseLong(time);
-        else
-            startTime = System.currentTimeMillis();
-    }
-
-    @Override
-    public OptionDescriber.IteratorOptions describeOptions() {
-        Map<String, String> options = new TreeMap<String, String>();
-        options.put(TIME_RANGE_PROP, "time range from the startTime (milliseconds)");
-        options.put(START_TIME_PROP, "if set, use the given value as the absolute time in milliseconds as the start time in the time range.");
-        return new OptionDescriber.IteratorOptions("timeRangeFilter", "TimeRangeFilter removes entries with timestamps outside of the given time range: " +
-                "[startTime - timeRange, startTime]",
-                options, null);
-    }
-
-    @Override
-    public boolean validateOptions(Map<String, String> options) {
-        Long.parseLong(options.get(TIME_RANGE_PROP));
-        return true;
-    }
-}
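Editor's note: a sketch of wiring the filter above onto a scanner using its own option keys; the iterator priority and the 24-hour window are illustrative:

    IteratorSetting setting = new IteratorSetting(15, "timeRange", TimeRangeFilter.class);
    setting.addOption(TimeRangeFilter.TIME_RANGE_PROP, "86400000"); // one day, in ms
    setting.addOption(TimeRangeFilter.START_TIME_PROP, String.valueOf(System.currentTimeMillis()));
    scanner.addScanIterator(setting);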
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
deleted file mode 100644
index b7c907956..000000000
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package mvm.rya.accumulo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import org.apache.accumulo.core.security.Authorizations;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Arrays;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-/**
- * Date: 1/28/13
- * Time: 8:36 AM
- */
-public class AccumuloRdfConfigurationTest {
-    private static final Logger logger = LoggerFactory.getLogger(AccumuloRdfConfigurationTest.class);
-
-    @Test
-    public void testAuths() {
-        String[] arr = {"U", "FOUO"};
-        String str = "U,FOUO";
-        Authorizations auths = new Authorizations(arr);
-
-        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-        conf.setAuths(arr);
-        assertTrue(Arrays.equals(arr, conf.getAuths()));
-        assertEquals(str, conf.getAuth());
-        assertEquals(auths, conf.getAuthorizations());
-
-        conf.setAuth(str);
-        assertTrue(Arrays.equals(arr, conf.getAuths()));
-        assertEquals(str, conf.getAuth());
-        assertEquals(auths, conf.getAuthorizations());
-    }
-}
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
deleted file mode 100644
index ab4528be7..000000000
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
+++ /dev/null
@@ -1,665 +0,0 @@
-package mvm.rya.accumulo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.accumulo.query.AccumuloRyaQueryEngine; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQuery; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.calrissian.mango.collect.CloseableIterable; -import org.calrissian.mango.collect.FluentCloseableIterable; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.XMLSchema; -import org.openrdf.query.BindingSet; - -import java.util.*; - -import static org.junit.Assert.*; - -/** - * Class AccumuloRdfDAOTest - * Date: Mar 7, 2012 - * Time: 9:42:28 AM - */ -public class AccumuloRyaDAOTest { - - private AccumuloRyaDAO dao; - private ValueFactory vf = new ValueFactoryImpl(); - static String litdupsNS = "urn:test:litdups#"; - private AccumuloRdfConfiguration conf; - private Connector connector; - - @Before - public void setUp() throws Exception { - dao = new AccumuloRyaDAO(); - connector = new MockInstance().getConnector("", ""); - dao.setConnector(connector); - conf = new AccumuloRdfConfiguration(); - dao.setConf(conf); - dao.init(); - } - - @After - public void tearDown() throws Exception { - dao.purge(conf); - dao.destroy(); - } - - @Test - public void testAdd() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - - CloseableIteration iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - int count = 0; - while (iter.hasNext()) { - assertTrue(uri1.equals(iter.next().getObject())); - count++; - } - iter.close(); - assertEquals(1, count); - - dao.delete(new RyaStatement(cpu, loadPerc, null), conf); - - iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testDeleteDiffVisibility() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - RyaStatement stmt1 = new RyaStatement(cpu, loadPerc, uri1, null, "1", "vis1".getBytes()); - dao.add(stmt1); - RyaStatement stmt2 = new RyaStatement(cpu, loadPerc, uri1, null, "2", "vis2".getBytes()); - dao.add(stmt2); - - AccumuloRdfConfiguration cloneConf = conf.clone(); - cloneConf.setAuth("vis1,vis2"); - - CloseableIteration iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf); - int count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(2, count); - - dao.delete(stmt1, cloneConf); - - iter = 
dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(1, count); - } - - @Test - public void testDeleteDiffTimestamp() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - RyaStatement stmt1 = new RyaStatement(cpu, loadPerc, uri1, null, "1", null, null, 100l); - dao.add(stmt1); - RyaStatement stmt2 = new RyaStatement(cpu, loadPerc, uri1, null, "2", null, null, 100l); - dao.add(stmt2); - - int resultSize = FluentCloseableIterable.from(dao.getQueryEngine().query( - RyaQuery.builder(new RyaStatement(cpu, loadPerc, null)).build())).autoClose().size(); - assertEquals(2, resultSize); - - final RyaStatement addStmt = new RyaStatement(cpu, loadPerc, uri1, null, "1", - null, null, 101l); - dao.delete(stmt1, conf); - dao.add(addStmt); - - resultSize = FluentCloseableIterable.from(dao.getQueryEngine().query( - RyaQuery.builder(new RyaStatement(cpu, loadPerc, null)).build())).autoClose().size(); - assertEquals(2, resultSize); //the delete marker should not delete the new stmt - } - - @Test - public void testDelete() throws Exception { - RyaURI predicate = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred")); - RyaURI subj = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "subj")); - - // create a "bulk load" of 10,000 statements - int statement_count = 10000; - for (int i = 0 ; i < statement_count ; i++){ - //make the statement very large so we will get a lot of random flushes - RyaURI obj = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, String.format("object%050d",i))); - RyaStatement stmt = new RyaStatement(subj, predicate, obj); - dao.add(stmt); - } - - CloseableIteration iter; - - //check to see if all of the statements made it to the subj table - //delete based on the data in the subj table - RyaStatement subjQuery = new RyaStatement(subj, null, null); - iter = dao.getQueryEngine().query(subjQuery, conf); - List stmts = new ArrayList(); - while (iter.hasNext()) { - stmts.add(iter.next()); - } - assertEquals(statement_count, stmts.size()); - dao.delete(stmts.iterator(), conf); - - // check statements in the predicate table - RyaStatement predQuery = new RyaStatement(null, predicate, null); - iter = dao.getQueryEngine().query(predQuery, conf); - int count = 0; - while (iter.hasNext()) { - count++; - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testAddEmptyString() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaType empty = new RyaType(""); - dao.add(new RyaStatement(cpu, loadPerc, empty)); - - CloseableIteration iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - while (iter.hasNext()) { - assertEquals("", iter.next().getObject().getData()); - } - iter.close(); - } - - @Test - public void testMaxResults() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + 
"uri3"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5"))); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - long limit = 3l; - queryConf.setLimit(limit); - - CloseableIteration iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - int count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(limit, count); - } - - @Test - public void testAddValue() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - String myval = "myval"; - dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, null, myval.getBytes())); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - CloseableIteration iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf); - assertTrue(iter.hasNext()); - assertEquals(myval, new String(iter.next().getValue())); - iter.close(); - } - - @Test - public void testAddCv() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - byte[] colVisABC = "A|B|C".getBytes(); - byte[] colVisAB = "A|B".getBytes(); - byte[] colVisA = "A".getBytes(); - dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, colVisABC)); - dao.add(new RyaStatement(cpu, loadPerc, uri2, null, null, colVisAB)); - dao.add(new RyaStatement(cpu, loadPerc, uri3, null, null, colVisA)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - //query with no auth - CloseableIteration iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - assertEquals(0, count); - iter.close(); - - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(); - queryConf.setAuth("B"); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(2, count); - - queryConf.setAuth("A"); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(3, count); - } - - @Test - public void testTTL() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - long current = System.currentTimeMillis(); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1"), null, null, null, null, current)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2"), null, null, null, null, current - 1010l)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri3"), null, null, null, null, current - 2010l)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4"), null, null, null, null, current - 3010l)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5"), null, null, null, null, current - 4010l)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = conf.clone(); - queryConf.setTtl(3000l); - - CloseableIteration iter 
= queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - int count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(3, count); - - queryConf.setStartTime(current - 3000l); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(2, count); - } - - @Test - public void testGetNamespace() throws Exception { - dao.addNamespace("ns", litdupsNS); - assertEquals(litdupsNS, dao.getNamespace("ns")); - dao.removeNamespace("ns"); - assertNull(dao.getNamespace("ns")); - } - - //TOOD: Add test for set of queries - @Test - public void testQuery() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - CloseableIteration iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - //now use batchscanner - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(2); - - coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - coll.add(new RyaStatement(null, loadPerc, uri3)); - coll.add(new RyaStatement(null, loadPerc, uri4)); - iter = queryEngine.batchQuery(coll, queryConf); - assertTrue(iter.hasNext()); //old code had a weird behaviour that could not perform hasNext consecutively - assertTrue(iter.hasNext()); - assertTrue(iter.hasNext()); - count = 0; - while (iter.hasNext()) { - count++; - assertTrue(iter.hasNext()); - iter.next(); - } - iter.close(); - assertEquals(4, count); - } - - @Test - public void testQueryDates() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType uri1 = new RyaType(XMLSchema.DATETIME, "2000-01-01"); - RyaType uri2 = new RyaType(XMLSchema.DATETIME, "2000-01-01TZ"); - RyaType uri3 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01.111Z"); - RyaType uri4 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01"); - RyaType uri5 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01.111"); - RyaType uri6 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01Z"); - RyaType uri7 = new RyaType(XMLSchema.DATETIME, "-2000-01-01T00:00:01Z"); - RyaType uri8 = new RyaType(XMLSchema.DATETIME, "111-01-01T00:00:01Z"); - RyaType uri9 = new RyaType(XMLSchema.DATETIME, "12345-01-01T00:00:01Z"); - - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - 
dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - dao.add(new RyaStatement(cpu, loadPerc, uri7)); - dao.add(new RyaStatement(cpu, loadPerc, uri8)); - dao.add(new RyaStatement(cpu, loadPerc, uri9)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - CloseableIteration iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - //now use batchscanner - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(2); - - coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - coll.add(new RyaStatement(null, loadPerc, uri3)); - coll.add(new RyaStatement(null, loadPerc, uri4)); - coll.add(new RyaStatement(null, loadPerc, uri5)); - coll.add(new RyaStatement(null, loadPerc, uri6)); - coll.add(new RyaStatement(null, loadPerc, uri7)); - coll.add(new RyaStatement(null, loadPerc, uri8)); - coll.add(new RyaStatement(null, loadPerc, uri9)); - iter = queryEngine.batchQuery(coll, queryConf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(9, count); - } - - @Test - public void testQueryCollectionRegex() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - conf.setRegexPredicate(loadPerc.getData()); - CloseableIteration iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - conf.setRegexPredicate("notLoadPerc"); - iter = queryEngine.batchQuery(coll, conf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testQueryCollectionRegexWBatchScanner() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, 
uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(1); - - Collection coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - conf.setRegexPredicate(loadPerc.getData()); - CloseableIteration iter = queryEngine.batchQuery(coll, queryConf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - queryConf.setRegexPredicate("notLoadPerc"); - iter = queryEngine.batchQuery(coll, queryConf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testLiteralTypes() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType longLit = new RyaType(XMLSchema.LONG, "3"); - - dao.add(new RyaStatement(cpu, loadPerc, longLit)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - CloseableIteration query = queryEngine.query(new RyaStatement(cpu, null, null), conf); - assertTrue(query.hasNext()); - RyaStatement next = query.next(); - assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData())); - query.close(); - - RyaType doubleLit = new RyaType(XMLSchema.DOUBLE, "2.0"); - - dao.add(new RyaStatement(cpu, loadPerc, doubleLit)); - - query = queryEngine.query(new RyaStatement(cpu, loadPerc, doubleLit), conf); - assertTrue(query.hasNext()); - next = query.next(); - assertEquals(Double.parseDouble(doubleLit.getData()), Double.parseDouble(next.getObject().getData()), 0.001); - query.close(); - } - - @Test - public void testSameLiteralStringTypes() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType longLit = new RyaType(XMLSchema.LONG, "10"); - RyaType strLit = new RyaType(XMLSchema.STRING, new String(RyaContext.getInstance().serializeType(longLit)[0])); - - RyaStatement expected = new RyaStatement(cpu, loadPerc, longLit); - dao.add(expected); - dao.add(new RyaStatement(cpu, loadPerc, strLit)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - CloseableIteration query = queryEngine.query(new RyaStatement(cpu, loadPerc, longLit), conf); - assertTrue(query.hasNext()); - RyaStatement next = query.next(); - assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData())); - assertEquals(longLit.getDataType(), next.getObject().getDataType()); - assertFalse(query.hasNext()); - query.close(); - } - - @Test - public void testPurge() throws RyaDAOException, TableNotFoundException { - dao.add(newRyaStatement()); - assertFalse("table should not be empty", areTablesEmpty()); - - dao.purge(conf); - assertTrue("table should be empty", areTablesEmpty()); - //assertNotNull(dao.getVersion()); - } - - @Test - public void testPurgeDoesNotBreakBatchWriters() throws TableNotFoundException, RyaDAOException { - dao.purge(conf); - assertTrue("table should be empty", areTablesEmpty()); - - dao.add(newRyaStatement()); - assertFalse("table should not be empty", areTablesEmpty()); - } - - @Test - public void testDropAndDestroy() throws RyaDAOException { - assertTrue(dao.isInitialized()); - dao.dropAndDestroy(); - for (String tableName : dao.getTables()) { - 
assertFalse(tableExists(tableName)); - } - assertFalse(dao.isInitialized()); - } - - private boolean areTablesEmpty() throws TableNotFoundException { - for (String table : dao.getTables()) { - if (tableExists(table)) { - // TODO: filter out version - if (createScanner(table).iterator().hasNext()) { - return false; - } - } - } - return true; - } - - private boolean tableExists(String tableName) { - return dao.getConnector().tableOperations().exists(tableName); - } - - private Scanner createScanner(String tableName) throws TableNotFoundException { - return dao.getConnector().createScanner(tableName, conf.getAuthorizations()); - } - - private RyaStatement newRyaStatement() { - RyaURI subject = new RyaURI(litdupsNS + randomString()); - RyaURI predicate = new RyaURI(litdupsNS + randomString()); - RyaType object = new RyaType(randomString()); - - return new RyaStatement(subject, predicate, object); - } - - private String randomString() { - return UUID.randomUUID().toString(); - } -} diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java deleted file mode 100644 index 7c3331df1..000000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.rya.accumulo; - -// -//import junit.framework.TestCase; -//import mvm.rya.accumulo.AccumuloRdfConfiguration; -//import mvm.rya.accumulo.DefineTripleQueryRangeFactory; -//import mvm.rya.accumulo.AccumuloRdfConfiguration; -//import mvm.rya.accumulo.DefineTripleQueryRangeFactory; -//import mvm.rya.api.domain.RangeValue; -//import org.apache.accumulo.core.data.Range; -//import org.openrdf.model.URI; -//import org.openrdf.model.Value; -//import org.openrdf.model.ValueFactory; -//import org.openrdf.model.impl.ValueFactoryImpl; -// -//import java.util.Map; -// -//import static mvm.rya.api.RdfCloudTripleStoreConstants.*; -// -///** -// */ -//public class DefineTripleQueryRangeFactoryTest extends TestCase { -// -// public static final String DELIM_BYTES_STR = new String(DELIM_BYTES); -// public static final String URI_MARKER_STR = "\u0007"; -// public static final String RANGE_ENDKEY_SUFFIX = "\u0000"; -// DefineTripleQueryRangeFactory factory = new DefineTripleQueryRangeFactory(); -// ValueFactory vf = ValueFactoryImpl.getInstance(); -// static String litdupsNS = "urn:test:litdups#"; -// -// private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -// -// public void testSPOCases() throws Exception { -// URI cpu = vf.createURI(litdupsNS, "cpu"); -// URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //spo -// Map.Entry entry = -// factory.defineRange(cpu, loadPerc, obj, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// String expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// -// //sp -// entry = factory.defineRange(cpu, loadPerc, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + loadPerc.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //s -// entry = factory.defineRange(cpu, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + cpu.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //all -// entry = factory.defineRange(null, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// assertEquals("", -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(new String(new byte[]{Byte.MAX_VALUE}) + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testSPOCasesWithRanges() throws Exception { -// URI subj_start = vf.createURI(litdupsNS, "subj_start"); -// URI subj_end = vf.createURI(litdupsNS, "subj_stop"); -// URI pred_start = vf.createURI(litdupsNS, "pred_start"); -// URI pred_end = vf.createURI(litdupsNS, "pred_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = vf.createURI(litdupsNS, "obj_stop"); 
-// -// Value subj = new RangeValue(subj_start, subj_end); -// Value pred = new RangeValue(pred_start, pred_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// //spo - o has range -// Map.Entry entry = -// factory.defineRange(subj_start, pred_start, obj, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// String expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //sp - p has range -// entry = factory.defineRange(subj_start, pred, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //s - s has range -// entry = factory.defineRange(subj, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + subj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + subj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testPOCases() throws Exception { -// URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //po -// Map.Entry entry = -// factory.defineRange(null, loadPerc, obj, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// String expected_start = URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //p -// entry = factory.defineRange(null, loadPerc, null, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// expected_start = URI_MARKER_STR + loadPerc.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testPOCasesWithRanges() throws Exception { -// URI pred_start = vf.createURI(litdupsNS, "pred_start"); -// URI pred_end = vf.createURI(litdupsNS, "pred_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = vf.createURI(litdupsNS, "obj_stop"); -// -// Value pred = new RangeValue(pred_start, pred_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// //po -// Map.Entry entry = -// factory.defineRange(null, pred_start, obj, 
conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// String expected_start = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //p -// entry = factory.defineRange(null, pred, null, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// expected_start = URI_MARKER_STR + pred_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + pred_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testOSPCases() throws Exception { -// URI cpu = vf.createURI(litdupsNS, "cpu"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //so -// Map.Entry entry = -// factory.defineRange(cpu, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// String expected_start = URI_MARKER_STR + obj.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + cpu.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //o -// entry = factory.defineRange(null, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// expected_start = URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// -// public void testOSPCasesWithRanges() throws Exception { -// URI subj_start = vf.createURI(litdupsNS, "subj_start"); -// URI subj_end = vf.createURI(litdupsNS, "subj_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = vf.createURI(litdupsNS, "obj_stop"); -// -// Value subj = new RangeValue(subj_start, subj_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// //so - s should be the range -// Map.Entry entry = -// factory.defineRange(subj, null, obj_start, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// String expected_start = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + subj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + subj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //o - o is range -// entry = factory.defineRange(null, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// expected_start = URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + obj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -//} diff 
--git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java deleted file mode 100644 index bda73e2d2..000000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java +++ /dev/null @@ -1,282 +0,0 @@ -package mvm.rya.accumulo.mr.eval; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.PartialKey; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.apache.hadoop.io.Text; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * Created by IntelliJ IDEA. - * Date: 4/24/12 - * Time: 5:05 PM - * To change this template use File | Settings | File Templates. 
- */ -@Ignore -public class AccumuloRdfCountToolTest { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - - private AccumuloRyaDAO dao; - private ValueFactory vf = new ValueFactoryImpl(); - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - static String litdupsNS = "urn:test:litdups#"; - - @Before - public void setUp() throws Exception { - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.WRITE); - - dao = new AccumuloRyaDAO(); - dao.setConnector(connector); - conf.setTablePrefix(tablePrefix); - dao.setConf(conf); - dao.init(); - } - - @After - public void tearDown() throws Exception { - dao.destroy(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - } - - @Test - public void testMR() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)))); - dao.add(new 
RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)))); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Map expectedValues = new HashMap(); - String row = test1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, - RdfCloudTripleStoreConstants.EMPTY_TEXT)); - row = pred1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.PRED_CF_TXT, - RdfCloudTripleStoreConstants.EMPTY_TEXT)); - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry entry : scanner) { - assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL)); - assertEquals(11, Long.parseLong(entry.getValue().toString())); - count++; - } - assertEquals(2, count); - } - -// public void testMRObject() throws Exception { -// URI pred1 = vf.createURI(litdupsNS, "pred1"); -// Literal literal = vf.createLiteral(0); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test0"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test1"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test2"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test3"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test4"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test5"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test6"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test7"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test8"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test9"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test10"), pred1, literal)); -// dao.commit(); -// -// AccumuloRdfCountTool.main(new String[]{ -// "-Dac.mock=true", -// "-Dac.instance=" + instance, -// "-Dac.username=" + user, -// "-Dac.pwd=" + pwd, -// "-Drdf.tablePrefix=" + tablePrefix, -// }); -// -// Map expectedValues = new HashMap(); -// byte[] row_bytes = RdfCloudTripleStoreUtils.writeValue(literal); -// expectedValues.put(new String(row_bytes), -// new Key(new Text(row_bytes), -// RdfCloudTripleStoreConstants.OBJ_CF_TXT, -// RdfCloudTripleStoreConstants.INFO_TXT)); -// row_bytes = RdfCloudTripleStoreUtils.writeValue(pred1); -// expectedValues.put(new String(row_bytes), -// new Key(new Text(row_bytes), -// RdfCloudTripleStoreConstants.PRED_CF_TXT, -// RdfCloudTripleStoreConstants.INFO_TXT)); -// Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); -// scanner.setRange(new Range()); -// int count = 0; -// for (Map.Entry entry : scanner) { -// assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL)); -// assertEquals(11, 
Long.parseLong(entry.getValue().toString())); -// count++; -// } -// assertEquals(2, count); -// } - - @Test - public void testTTL() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)))); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Dac.ttl=0", - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry entry : scanner) { - count++; - } - assertEquals(0, count); - } - - @Test - public void testContext() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - RyaURI cntxt = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cntxt")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)), cntxt)); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Map 
expectedValues = new HashMap(); - String row = test1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, - new Text(cntxt.getData()))); - row = pred1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.PRED_CF_TXT, - new Text(cntxt.getData()))); - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry entry : scanner) { - assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL)); - assertEquals(11, Long.parseLong(entry.getValue().toString())); - count++; - } - assertEquals(2, count); - } -} diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java deleted file mode 100644 index 02b835737..000000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java +++ /dev/null @@ -1,146 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Iterator; -import java.util.Map; - -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.apache.hadoop.io.Text; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.rio.RDFFormat; - -/** - * Created by IntelliJ IDEA. - * Date: 4/25/12 - * Time: 10:51 AM - * To change this template use File | Settings | File Templates. 
- */ -public class RdfFileInputToolTest extends TestCase { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - - @Override - public void setUp() throws Exception { - super.setUp(); - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.WRITE); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - } - - public void testNTriplesInput() throws Exception { - RdfFileInputTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - "-Drdf.format=" + RDFFormat.NTRIPLES.getName(), - "src/test/resources/test.ntriples", - }); - - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, auths); - scanner.setRange(new Range()); - Iterator> iterator = scanner.iterator(); - ValueFactory vf = new ValueFactoryImpl(); - assertTrue(iterator.hasNext()); - RyaStatement rs = new RyaStatement(new RyaURI("urn:lubm:rdfts#GraduateStudent01"), - new RyaURI("urn:lubm:rdfts#hasFriend"), - new RyaURI("urn:lubm:rdfts#GraduateStudent02")); - assertEquals(new Text(RyaTripleContext.getInstance(new AccumuloRdfConfiguration()).serializeTriple(rs).get(TABLE_LAYOUT.SPO).getRow()), iterator.next().getKey().getRow()); - } - - public void testInputContext() throws Exception { - RdfFileInputTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - "-Drdf.format=" + RDFFormat.TRIG.getName(), - 
"src/test/resources/namedgraphs.trig", - }); - - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, auths); - scanner.setRange(new Range()); - Iterator> iterator = scanner.iterator(); - ValueFactory vf = new ValueFactoryImpl(); - assertTrue(iterator.hasNext()); - RyaStatement rs = new RyaStatement(new RyaURI("http://www.example.org/exampleDocument#Monica"), - new RyaURI("http://www.example.org/vocabulary#name"), - new RyaType("Monica Murphy"), - new RyaURI("http://www.example.org/exampleDocument#G1")); - Key key = iterator.next().getKey(); - - TripleRow tripleRow = RyaTripleContext.getInstance(new AccumuloRdfConfiguration()).serializeTriple(rs).get(TABLE_LAYOUT.SPO); - assertEquals(new Text(tripleRow.getRow()), key.getRow()); - assertEquals(new Text(tripleRow.getColumnFamily()), key.getColumnFamily()); - } - -} diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java deleted file mode 100644 index 5ac2d74a9..000000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java +++ /dev/null @@ -1,319 +0,0 @@ -package mvm.rya.accumulo.mr.upgrade; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.accumulo.query.AccumuloRyaQueryEngine; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQuery; -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.*; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.calrissian.mango.collect.CloseableIterable; -import org.openrdf.model.vocabulary.XMLSchema; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Map; - -/** - * Created by IntelliJ IDEA. - * Date: 4/25/12 - * Time: 10:51 AM - * To change this template use File | Settings | File Templates. 
- */ -public class Upgrade322ToolTest extends TestCase { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - - @Override - public void setUp() throws Exception { - super.setUp(); - - final String spoTable = tablePrefix + - RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX; - final String poTable = tablePrefix + - RdfCloudTripleStoreConstants.TBL_PO_SUFFIX; - final String ospTable = tablePrefix + - RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX; - - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - - connector.tableOperations().create(spoTable); - connector.tableOperations().create(poTable); - connector.tableOperations().create(ospTable); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, spoTable, TablePermission.READ); - secOps.grantTablePermission(user, poTable, TablePermission.READ); - secOps.grantTablePermission(user, ospTable, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.WRITE); - - //load data - final BatchWriter ospWriter = connector - .createBatchWriter(ospTable, new BatchWriterConfig()); - ospWriter.addMutation(getMutation("00000000000000000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0000http://here/2010/tracked-data-provenance/ns#longLit\u0001\u0004")); - ospWriter.addMutation(getMutation("00000000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#intLit\u0001\u0005")); - ospWriter.addMutation(getMutation("00000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#byteLit\u0001\t")); - ospWriter.addMutation(getMutation("00001 1.0\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#doubleLit\u0001\u0006")); - ospWriter.addMutation(getMutation("10\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0000http" + - "://here/2010/tracked-data-provenance/ns#shortLit\u0001http://www.w3" + - ".org/2001/XMLSchema#short\u0001\b")); - ospWriter.addMutation(getMutation("10.0\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#floatLit\u0001http" + - "://www.w3.org/2001/XMLSchema#float\u0001\b")); - ospWriter.addMutation(getMutation("3.0.0\u0000urn:mvm.rya/2012/05#rts\u0000urn:mvm" + - ".rya/2012/05#version\u0001\u0003")); - ospWriter.addMutation(getMutation("9223370726404375807\u0000http://here/2010/tracked-data-provenance/ns" + - "#uuid10\u0000http://here/2010/tracked-data-provenance/ns#dateLit" + - "\u0001\u0007")); - ospWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#Created\u0000http://here" + - "/2010/tracked-data-provenance/ns#uuid10\u0000http://www.w3" + - 
".org/1999/02/22-rdf-syntax-ns#type\u0001\u0002")); - ospWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#objectuuid1\u0000http" + - "://here/2010/tracked-data-provenance/ns#uuid10\u0000http://here/2010" + - "/tracked-data-provenance/ns#uriLit\u0001\u0002")); - ospWriter.addMutation(getMutation("stringLit\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#stringLit\u0001" + - "\u0003")); - ospWriter.addMutation(getMutation("true\u0000http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#booleanLit\u0001\n")); - ospWriter.flush(); - ospWriter.close(); - - final BatchWriter spoWriter = connector - .createBatchWriter(spoTable, new BatchWriterConfig()); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10\u0000http://here/2010/tracked-data-provenance/ns#longLit\u000000000000000000000010\u0001\u0004")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#intLit\u000000000000010\u0001\u0005")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#byteLit\u000000000010\u0001\t")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#doubleLit\u000000001 1.0\u0001\u0006")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10\u0000http" + - "://here/2010/tracked-data-provenance/ns#shortLit\u000010\u0001http://www.w3" + - ".org/2001/XMLSchema#short\u0001\b")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#floatLit\u0001http" + - "://www.w3.org/2001/XMLSchema#float\u000010.0\u0001\b")); - spoWriter.addMutation(getMutation("urn:mvm.rya/2012/05#rts\u0000urn:mvm" + - ".rya/2012/05#version\u00003.0.0\u0001\u0003")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns" + - "#uuid10\u0000http://here/2010/tracked-data-provenance/ns#dateLit" + - "\u00009223370726404375807\u0001\u0007")); - spoWriter.addMutation(getMutation("http://here" + - "/2010/tracked-data-provenance/ns#uuid10\u0000http://www.w3" + - ".org/1999/02/22-rdf-syntax-ns#type\u0000http://here/2010/tracked-data-provenance/ns#Created\u0001\u0002")); - spoWriter.addMutation(getMutation("http" + - "://here/2010/tracked-data-provenance/ns#uuid10\u0000http://here/2010" + - "/tracked-data-provenance/ns#uriLit\u0000http://here/2010/tracked-data-provenance/ns#objectuuid1\u0001\u0002")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#stringLit\u0000stringLit\u0001" + - "\u0003")); - spoWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#uuid10" + - "\u0000http://here/2010/tracked-data-provenance/ns#booleanLit\u0000true\u0001\n")); - spoWriter.flush(); - spoWriter.close(); - - final BatchWriter poWriter = connector - .createBatchWriter(poTable, new BatchWriterConfig()); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#longLit\u000000000000000000000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0004")); - 
poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#intLit\u000000000000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0005")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#byteLit\u000000000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\t")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#doubleLit\u000000001 1.0\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0006")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#shortLit\u000010\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001http://www.w3" + - ".org/2001/XMLSchema#short\u0001\b")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#floatLit\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001http" + - "://www.w3.org/2001/XMLSchema#float\u000010.0\u0001\b")); - poWriter.addMutation(getMutation("urn:mvm" + - ".rya/2012/05#version\u00003.0.0\u0000urn:mvm.rya/2012/05#rts\u0001\u0003")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#dateLit" + - "\u00009223370726404375807\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0007")); - poWriter.addMutation(getMutation("http://www.w3" + - ".org/1999/02/22-rdf-syntax-ns#type\u0000http://here/2010/tracked-data-provenance/ns#Created\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0002")); - poWriter.addMutation(getMutation("http://here/2010" + - "/tracked-data-provenance/ns#uriLit\u0000http://here/2010/tracked-data-provenance/ns#objectuuid1\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\u0002")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#stringLit\u0000stringLit\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001" + - "\u0003")); - poWriter.addMutation(getMutation("http://here/2010/tracked-data-provenance/ns#booleanLit\u0000true\u0000http://here/2010/tracked-data-provenance/ns#uuid10\u0001\n")); - poWriter.flush(); - poWriter.close(); - } - - public Mutation getMutation(String row) { - final Mutation mutation = new Mutation(row); - mutation.put("", "", ""); - return mutation; - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete( - tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().delete( - tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - } - - public void testUpgrade() throws Exception { - Upgrade322Tool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - }); - - final AccumuloRdfConfiguration configuration = new AccumuloRdfConfiguration(); - configuration.setTablePrefix(tablePrefix); - final AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - ryaDAO.setConf(configuration); - ryaDAO.init(); - - final AccumuloRyaQueryEngine queryEngine = ryaDAO.getQueryEngine(); - - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new 
RyaURI("http://here/2010/tracked-data-provenance/ns#booleanLit"), - new RyaType(XMLSchema.BOOLEAN, "true")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#longLit"), - new RyaType(XMLSchema.LONG, "10")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#intLit"), - new RyaType(XMLSchema.INTEGER, "10")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#byteLit"), - new RyaType(XMLSchema.BYTE, "10")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#doubleLit"), - new RyaType(XMLSchema.DOUBLE, "10.0")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#dateLit"), - new RyaType(XMLSchema.DATETIME, "2011-07-12T06:00:00.000Z")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#stringLit"), - new RyaType("stringLit")), queryEngine); - verify(new RyaStatement( - new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), - new RyaURI("http://here/2010/tracked-data-provenance/ns#uriLit"), - new RyaURI("http://here/2010/tracked-data-provenance/ns" + - "#objectuuid1")), queryEngine); - verify(new RyaStatement( - new RyaURI("urn:mvm.rya/2012/05#rts"), - new RyaURI("urn:mvm.rya/2012/05#version"), - new RyaType("3.0.0")), queryEngine); - } - - private void verify(RyaStatement ryaStatement, AccumuloRyaQueryEngine queryEngine) - throws RyaDAOException, IOException { - - //check osp - CloseableIterable statements = - queryEngine.query(RyaQuery.builder(new RyaStatement(null, null, ryaStatement.getObject())) - .build()); - try { - verifyFirstStatement(ryaStatement, statements); - } finally { - statements.close(); - } - - //check po - statements = queryEngine.query(RyaQuery.builder( - new RyaStatement(null, ryaStatement.getPredicate(), - ryaStatement.getObject())).build()); - try { - verifyFirstStatement(ryaStatement, statements); - } finally { - statements.close(); - } - - //check spo - statements = queryEngine.query(RyaQuery.builder( - new RyaStatement(ryaStatement.getSubject(), - ryaStatement.getPredicate(), - ryaStatement.getObject())).build()); - try { - verifyFirstStatement(ryaStatement, statements); - } finally { - statements.close(); - } - } - - private void verifyFirstStatement( - RyaStatement ryaStatement, CloseableIterable statements) { - final Iterator iterator = statements.iterator(); - assertTrue(iterator.hasNext()); - final RyaStatement first = iterator.next(); - assertEquals(ryaStatement.getSubject(), first.getSubject()); - assertEquals(ryaStatement.getPredicate(), first.getPredicate()); - assertEquals(ryaStatement.getObject(), first.getObject()); - assertFalse(iterator.hasNext()); - } - - public void printTableData(String tableName) - throws TableNotFoundException{ - Scanner scanner = connector.createScanner(tableName, auths); - scanner.setRange(new Range()); - for(Map.Entry entry : scanner) { - final Key key = entry.getKey(); - final Value value = entry.getValue(); - 
System.out.println(key.getRow() + " " + key.getColumnFamily() + " " + key.getColumnQualifier() + " " + key.getTimestamp() + " " + value.toString()); - } - } - -} diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java deleted file mode 100644 index b1382927d..000000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java +++ /dev/null @@ -1,119 +0,0 @@ -package mvm.rya.accumulo.mr.upgrade; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.resolver.impl.*; -import org.junit.Test; - -import static mvm.rya.accumulo.mr.upgrade.Upgrade322Tool.UpgradeObjectSerialization; -import static org.junit.Assert.*; - -public class UpgradeObjectSerializationTest { - - @Test - public void testBooleanUpgrade() throws Exception { - String object = "true"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new UpgradeObjectSerialization(); - final String upgrade = upgradeObjectSerialization - .upgrade(object, BooleanRyaTypeResolver.BOOLEAN_LITERAL_MARKER); - - assertEquals("1", upgrade); - } - - @Test - public void testBooleanUpgradeFalse() throws Exception { - String object = "false"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new UpgradeObjectSerialization(); - final String upgrade = upgradeObjectSerialization - .upgrade(object, BooleanRyaTypeResolver.BOOLEAN_LITERAL_MARKER); - - assertEquals("0", upgrade); - } - - @Test - public void testByteUpgradeLowest() throws Exception { - String object = "-127"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new UpgradeObjectSerialization(); - final String upgrade = upgradeObjectSerialization - .upgrade(object, ByteRyaTypeResolver.LITERAL_MARKER); - - assertEquals("81", upgrade); - } - - @Test - public void testByteUpgradeHighest() throws Exception { - String object = "127"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new UpgradeObjectSerialization(); - final String upgrade = upgradeObjectSerialization - .upgrade(object, ByteRyaTypeResolver.LITERAL_MARKER); - - assertEquals("7f", upgrade); - } - - @Test - public void testLongUpgrade() throws Exception { - String object = "00000000000000000010"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new UpgradeObjectSerialization(); - final String upgrade = upgradeObjectSerialization - .upgrade(object, LongRyaTypeResolver.LONG_LITERAL_MARKER); - - assertEquals("800000000000000a", upgrade); - } - - @Test - public void testIntUpgrade() throws Exception { - String object = "00000000010"; - final UpgradeObjectSerialization upgradeObjectSerialization - = new 
UpgradeObjectSerialization();
-        final String upgrade = upgradeObjectSerialization
-            .upgrade(object, IntegerRyaTypeResolver.INTEGER_LITERAL_MARKER);
-
-        assertEquals("8000000a", upgrade);
-    }
-
-    @Test
-    public void testDateTimeUpgrade() throws Exception {
-        String object = "9223370726404375807";
-        final UpgradeObjectSerialization upgradeObjectSerialization
-          = new UpgradeObjectSerialization();
-        final String upgrade = upgradeObjectSerialization
-            .upgrade(object, DateTimeRyaTypeResolver.DATETIME_LITERAL_MARKER);
-
-        assertEquals("800001311cee3b00", upgrade);
-    }
-
-    @Test
-    public void testDoubleUpgrade() throws Exception {
-        String object = "00001 1.0";
-        final UpgradeObjectSerialization upgradeObjectSerialization
-          = new UpgradeObjectSerialization();
-        final String upgrade = upgradeObjectSerialization
-            .upgrade(object, DoubleRyaTypeResolver.DOUBLE_LITERAL_MARKER);
-
-        assertEquals("c024000000000000", upgrade);
-    }
-}
diff --git a/dao/accumulo.rya/src/test/resources/namedgraphs.trig b/dao/accumulo.rya/src/test/resources/namedgraphs.trig
deleted file mode 100644
index b647632fd..000000000
--- a/dao/accumulo.rya/src/test/resources/namedgraphs.trig
+++ /dev/null
@@ -1,7 +0,0 @@
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-@prefix swp: <http://www.w3.org/2004/03/trix/swp-2/> .
-@prefix dc: <http://purl.org/dc/elements/1.1/> .
-@prefix ex: <http://www.example.org/vocabulary#> .
-@prefix : <http://www.example.org/exampleDocument#> .
-:G1 { :Monica ex:name "Monica Murphy" . }
\ No newline at end of file
diff --git a/dao/accumulo.rya/src/test/resources/test.ntriples b/dao/accumulo.rya/src/test/resources/test.ntriples
deleted file mode 100644
index 26a0a17aa..000000000
--- a/dao/accumulo.rya/src/test/resources/test.ntriples
+++ /dev/null
@@ -1 +0,0 @@
-<urn:lubm:rdfts#GraduateStudent01> <urn:lubm:rdfts#hasFriend> <urn:lubm:rdfts#GraduateStudent02> .
\ No newline at end of file
diff --git a/dao/mongodb.rya/pom.xml b/dao/mongodb.rya/pom.xml
deleted file mode 100644
index 0d87fa543..000000000
--- a/dao/mongodb.rya/pom.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.dao</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>mongodb.rya</artifactId>
-    <name>Apache Rya MongoDB DAO</name>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.rya</groupId>
-            <artifactId>rya.api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.mongodb</groupId>
-            <artifactId>mongo-java-driver</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>de.flapdoodle.embed</groupId>
-            <artifactId>de.flapdoodle.embed.mongo</artifactId>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
deleted file mode 100644
index 57548ec18..000000000
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
+++ /dev/null
@@ -1,207 +0,0 @@
-package mvm.rya.mongodb;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.BatchRyaQuery;
-import mvm.rya.api.persist.query.RyaQuery;
-import mvm.rya.api.persist.query.RyaQueryEngine;
-import mvm.rya.mongodb.dao.MongoDBStorageStrategy;
-import mvm.rya.mongodb.dao.SimpleMongoDBStorageStrategy;
-import mvm.rya.mongodb.iter.NonCloseableRyaStatementCursorIterator;
-import mvm.rya.mongodb.iter.RyaStatementBindingSetCursorIterator;
-import mvm.rya.mongodb.iter.RyaStatementCursorIterable;
-import mvm.rya.mongodb.iter.RyaStatementCursorIterator;
-
-import org.calrissian.mango.collect.CloseableIterable;
-import org.openrdf.query.BindingSet;
-
-import com.mongodb.DB;
-import com.mongodb.DBCollection;
-import com.mongodb.DBObject;
-import com.mongodb.MongoClient;
-
-/**
- * Date: 7/17/12
- * Time: 9:28 AM
- */
-public class MongoDBQueryEngine implements RyaQueryEngine<MongoDBRdfConfiguration>, Closeable {
-
-    private MongoDBRdfConfiguration configuration;
-    private MongoClient mongoClient;
-    private DBCollection coll;
-    private MongoDBStorageStrategy strategy;
-
-    public MongoDBQueryEngine(MongoDBRdfConfiguration conf) throws NumberFormatException, UnknownHostException{
-        mongoClient = new MongoClient(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE),
-                Integer.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT)));
-        DB db = mongoClient.getDB( conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME));
-        coll = db.getCollection(conf.getTriplesCollectionName());
-        this.strategy = new SimpleMongoDBStorageStrategy();
-    }
-
-
-    @Override
-    public void setConf(MongoDBRdfConfiguration conf) {
-        configuration = conf;
-    }
-
-    @Override
-    public MongoDBRdfConfiguration getConf() {
-        return configuration;
-    }
-
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> query(
-            RyaStatement stmt, MongoDBRdfConfiguration conf)
-            throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-        Long maxResults = conf.getLimit();
-        Set<DBObject> queries = new HashSet<DBObject>();
-        DBObject query = strategy.getQuery(stmt);
-        queries.add(query);
-        RyaStatementCursorIterator iterator = new RyaStatementCursorIterator(coll, queries, strategy);
-
-        if (maxResults != null) {
-            iterator.setMaxResults(maxResults);
-        }
-        return iterator;
-    }
-    @Override
-    public CloseableIteration<? extends Map.Entry<RyaStatement, BindingSet>, RyaDAOException> queryWithBindingSet(
-            Collection<Map.Entry<RyaStatement, BindingSet>> stmts,
-            MongoDBRdfConfiguration conf) throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-        Long maxResults = conf.getLimit();
-        Map<DBObject, BindingSet> rangeMap = new HashMap<DBObject, BindingSet>();
-
-        //TODO: cannot span multiple tables here
-        try {
-            for (Map.Entry<RyaStatement, BindingSet> stmtbs : stmts) {
-                RyaStatement stmt = stmtbs.getKey();
-                BindingSet bs = stmtbs.getValue();
-                DBObject query = strategy.getQuery(stmt);
-                rangeMap.put(query, bs);
-            }
-
-            // TODO not sure what to do about regex ranges?
-            RyaStatementBindingSetCursorIterator iterator = new RyaStatementBindingSetCursorIterator(coll, rangeMap, strategy);
-
-            if (maxResults != null) {
-                iterator.setMaxResults(maxResults);
-            }
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-
-    }
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> batchQuery(
-            Collection<RyaStatement> stmts, MongoDBRdfConfiguration conf)
-            throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-        Long maxResults = conf.getLimit();
-        Set<DBObject> queries = new HashSet<DBObject>();
-
-        try {
-            for (RyaStatement stmt : stmts) {
-                queries.add( strategy.getQuery(stmt));
-            }
-
-            // TODO not sure what to do about regex ranges?
-            RyaStatementCursorIterator iterator = new RyaStatementCursorIterator(coll, queries, strategy);
-
-            if (maxResults != null) {
-                iterator.setMaxResults(maxResults);
-            }
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-
-    }
-    @Override
-    public CloseableIterable<RyaStatement> query(RyaQuery ryaQuery)
-            throws RyaDAOException {
-        Set<DBObject> queries = new HashSet<DBObject>();
-
-        try {
-            queries.add( strategy.getQuery(ryaQuery));
-
-            // TODO not sure what to do about regex ranges?
-            // TODO this is gross
-            RyaStatementCursorIterable iterator = new RyaStatementCursorIterable(new NonCloseableRyaStatementCursorIterator(new RyaStatementCursorIterator(coll, queries, strategy)));
-
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-    @Override
-    public CloseableIterable<RyaStatement> query(BatchRyaQuery batchRyaQuery)
-            throws RyaDAOException {
-        try {
-            Set<DBObject> queries = new HashSet<DBObject>();
-            for (RyaStatement statement : batchRyaQuery.getQueries()){
-                queries.add( strategy.getQuery(statement));
-
-            }
-
-            // TODO not sure what to do about regex ranges?
-            // TODO this is gross
-            RyaStatementCursorIterable iterator = new RyaStatementCursorIterable(new NonCloseableRyaStatementCursorIterator(new RyaStatementCursorIterator(coll, queries, strategy)));
-
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void close() throws IOException {
-        if (mongoClient != null){ mongoClient.close(); }
-    }
-
-
-
-
-
-}
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
deleted file mode 100644
index 3c5a8ef66..000000000
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
+++ /dev/null
@@ -1,121 +0,0 @@
-package mvm.rya.mongodb;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - - - -import java.util.List; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.apache.hadoop.conf.Configuration; - -import com.google.common.collect.Lists; - -public class MongoDBRdfConfiguration extends RdfCloudTripleStoreConfiguration { - public static final String MONGO_INSTANCE = "mongo.db.instance"; - public static final String MONGO_INSTANCE_PORT = "mongo.db.port"; - public static final String MONGO_GEO_MAXDISTANCE = "mongo.geo.maxdist"; - public static final String MONGO_DB_NAME = "mongo.db.name"; - public static final String MONGO_COLLECTION_PREFIX = "mongo.db.collectionprefix"; - public static final String MONGO_USER = "mongo.db.user"; - public static final String MONGO_USER_PASSWORD = "mongo.db.userpassword"; - public static final String USE_TEST_MONGO = "mongo.db.test"; - public static final String CONF_ADDITIONAL_INDEXERS = "ac.additional.indexers"; - - public MongoDBRdfConfiguration() { - super(); - } - - public MongoDBRdfConfiguration(Configuration other) { - super(other); - } - - @Override - public MongoDBRdfConfiguration clone() { - return new MongoDBRdfConfiguration(this); - } - - public boolean getUseTestMongo() { - return this.getBoolean(USE_TEST_MONGO, false); - } - - public void setUseTestMongo(boolean useTestMongo) { - this.setBoolean(USE_TEST_MONGO, useTestMongo); - } - - public String getTriplesCollectionName() { - return this.get(MONGO_COLLECTION_PREFIX, "rya") + "_triples"; - } - - public String getCollectionName() { - return this.get(MONGO_COLLECTION_PREFIX, "rya"); - } - - public void setCollectionName(String name) { - this.set(MONGO_COLLECTION_PREFIX, name); - } - - public String getMongoInstance() { - return this.get(MONGO_INSTANCE, "localhost"); - } - - public void setMongoInstance(String name) { - this.set(MONGO_INSTANCE, name); - } - - public String getMongoPort() { - return this.get(MONGO_INSTANCE_PORT, "27017"); - } - - public void setMongoPort(String name) { - this.set(MONGO_INSTANCE_PORT, name); - } - - public String getMongoDBName() { - return this.get(MONGO_DB_NAME, "rya"); - } - - public void setMongoDBName(String name) { - this.set(MONGO_DB_NAME, name); - } - - public String getNameSpacesCollectionName() { - return this.get(MONGO_COLLECTION_PREFIX, "rya") + "_ns"; - } - - public void setAdditionalIndexers(Class... indexers) { - List strs = Lists.newArrayList(); - for (Class ai : indexers){ - strs.add(ai.getName()); - } - - setStrings(CONF_ADDITIONAL_INDEXERS, strs.toArray(new String[]{})); - } - - public List getAdditionalIndexers() { - return getInstances(CONF_ADDITIONAL_INDEXERS, RyaSecondaryIndexer.class); - } - - - - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java deleted file mode 100644 index 1f341dcf3..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java +++ /dev/null @@ -1,222 +0,0 @@ -package mvm.rya.mongodb; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
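A minimal usage sketch for the configuration class above, relying only on the setters and defaults visible in it (instance "localhost", port "27017", database "rya", collection prefix "rya"); setUseTestMongo(true) is the same flag MongoDBRyaDAO.init() checks to spin up an embedded mongod:

    MongoDBRdfConfiguration conf = new MongoDBRdfConfiguration();
    conf.setUseTestMongo(true);         // embedded mongod via flapdoodle
    conf.setMongoInstance("localhost"); // ignored when the test mongod is used
    conf.setMongoPort("27017");
    conf.setMongoDBName("rya");
    conf.setCollectionName("rya");      // yields the rya_triples and rya_ns collections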
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.IOException; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.RyaNamespaceManager; -import mvm.rya.api.persist.index.RyaSecondaryIndexer; -import mvm.rya.api.persist.query.RyaQueryEngine; -import mvm.rya.mongodb.dao.MongoDBNamespaceManager; -import mvm.rya.mongodb.dao.MongoDBStorageStrategy; -import mvm.rya.mongodb.dao.SimpleMongoDBNamespaceManager; -import mvm.rya.mongodb.dao.SimpleMongoDBStorageStrategy; - -import org.apache.commons.io.IOUtils; - -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.mongodb.InsertOptions; -import com.mongodb.MongoClient; -import com.mongodb.MongoCredential; -import com.mongodb.ServerAddress; - -import de.flapdoodle.embed.mongo.distribution.Version; -import de.flapdoodle.embed.mongo.tests.MongodForTestsFactory; - -public class MongoDBRyaDAO implements RyaDAO{ - - - private MongoDBRdfConfiguration conf; - private MongoClient mongoClient; - private DB db; - private DBCollection coll; - private MongoDBQueryEngine queryEngine; - private MongoDBStorageStrategy storageStrategy; - private MongoDBNamespaceManager nameSpaceManager; - private MongodForTestsFactory testsFactory; - - private List secondaryIndexers; - - public MongoDBRyaDAO(MongoDBRdfConfiguration conf) throws RyaDAOException{ - this.conf = conf; - init(); - } - - public void setConf(MongoDBRdfConfiguration conf) { - this.conf = conf; - } - - public MongoDBRdfConfiguration getConf() { - return conf; - } - - public void init() throws RyaDAOException { - try { - boolean useMongoTest = conf.getUseTestMongo(); - if (useMongoTest) { - testsFactory = MongodForTestsFactory.with(Version.Main.PRODUCTION); - mongoClient = testsFactory.newMongo(); - int port = mongoClient.getServerAddressList().get(0).getPort(); - conf.set(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT, Integer.toString(port)); - } else { - ServerAddress server = new ServerAddress(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE), - Integer.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT))); - if (conf.get(MongoDBRdfConfiguration.MONGO_USER) != null) { - MongoCredential cred = MongoCredential.createCredential( - conf.get(MongoDBRdfConfiguration.MONGO_USER), - conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD), - conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME).toCharArray()); - mongoClient = new MongoClient(server, Arrays.asList(cred)); - } else { - mongoClient = new MongoClient(server); - } - } - secondaryIndexers = conf.getAdditionalIndexers(); - for(RyaSecondaryIndexer index: secondaryIndexers) { - index.setConf(conf); - } - - db = mongoClient.getDB(conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME)); - coll = db.getCollection(conf.getTriplesCollectionName()); - nameSpaceManager = new 
SimpleMongoDBNamespaceManager(db.getCollection(conf.getNameSpacesCollectionName())); - queryEngine = new MongoDBQueryEngine(conf); - storageStrategy = new SimpleMongoDBStorageStrategy(); - storageStrategy.createIndices(coll); - - } catch (UnknownHostException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - public boolean isInitialized() throws RyaDAOException { - return true; - } - - public void destroy() throws RyaDAOException { - if (mongoClient != null) { - mongoClient.close(); - } - if (conf.getUseTestMongo()) { - testsFactory.shutdown(); - } - - IOUtils.closeQuietly(queryEngine); - } - - public void add(RyaStatement statement) throws RyaDAOException { - // add it to the collection - try { - coll.insert(storageStrategy.serialize(statement)); - for(RyaSecondaryIndexer index: secondaryIndexers) { - index.storeStatement(statement); - } - } - catch (com.mongodb.MongoException.DuplicateKey exception){ - // ignore - } - catch (com.mongodb.DuplicateKeyException exception){ - // ignore - } - catch (Exception ex){ - // ignore single exceptions - ex.printStackTrace(); - } - } - - public void add(Iterator statement) throws RyaDAOException { - List dbInserts = new ArrayList(); - while (statement.hasNext()){ - RyaStatement ryaStatement = statement.next(); - DBObject insert = storageStrategy.serialize(ryaStatement); - dbInserts.add(insert); - - try { - for (RyaSecondaryIndexer index : secondaryIndexers) { - index.storeStatement(ryaStatement); - } - } catch (IOException e) { - throw new RyaDAOException(e); - } - - } - coll.insert(dbInserts, new InsertOptions().continueOnError(true)); - } - - public void delete(RyaStatement statement, MongoDBRdfConfiguration conf) - throws RyaDAOException { - DBObject obj = storageStrategy.serialize(statement); - coll.remove(obj); - } - - public void dropGraph(MongoDBRdfConfiguration conf, RyaURI... graphs) - throws RyaDAOException { - - } - - public void delete(Iterator statements, - MongoDBRdfConfiguration conf) throws RyaDAOException { - while (statements.hasNext()){ - RyaStatement ryaStatement = statements.next(); - coll.remove(storageStrategy.serialize(ryaStatement)); - } - - } - - public String getVersion() throws RyaDAOException { - return "1.0"; - } - - public RyaQueryEngine getQueryEngine() { - return queryEngine; - } - - public RyaNamespaceManager getNamespaceManager() { - return nameSpaceManager; - } - - public void purge(RdfCloudTripleStoreConfiguration configuration) { - // TODO Auto-generated method stub - - } - - public void dropAndDestroy() throws RyaDAOException { - db.dropDatabase(); // this is dangerous! - } - - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java deleted file mode 100644 index fd9b65930..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java +++ /dev/null @@ -1,35 +0,0 @@ -package mvm.rya.mongodb.dao; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
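Tying the DAO above together, a hedged add-then-query round trip (the URIs are illustrative; RyaQuery.builder is the same builder the Accumulo upgrade test earlier in this patch uses):

    MongoDBRyaDAO dao = new MongoDBRyaDAO(conf);
    dao.add(new RyaStatement(
            new RyaURI("http://example.com/s"),
            new RyaURI("http://example.com/p"),
            new RyaType("o")));
    CloseableIterable<RyaStatement> hits = dao.getQueryEngine()
            .query(RyaQuery.builder(new RyaStatement(
                    new RyaURI("http://example.com/s"), null, null)).build());
    for (RyaStatement hit : hits) {
        System.out.println(hit);
    }
    hits.closeQuietly();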
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaNamespaceManager; -import mvm.rya.api.persist.query.RyaQuery; -import mvm.rya.mongodb.MongoDBRdfConfiguration; - -import com.mongodb.DBCollection; -import com.mongodb.DBObject; - -public interface MongoDBNamespaceManager extends RyaNamespaceManager{ - - public void createIndices(DBCollection coll); - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java deleted file mode 100644 index 8a1004f2c..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java +++ /dev/null @@ -1,41 +0,0 @@ -package mvm.rya.mongodb.dao; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.query.RyaQuery; - -import com.mongodb.DBCollection; -import com.mongodb.DBObject; - -public interface MongoDBStorageStrategy { - - public DBObject getQuery(RyaStatement stmt); - - public RyaStatement deserializeDBObject(DBObject queryResult); - - public DBObject serialize(RyaStatement statement); - - public DBObject getQuery(RyaQuery ryaQuery); - - public void createIndices(DBCollection coll); - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java deleted file mode 100644 index 259420bdb..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java +++ /dev/null @@ -1,181 +0,0 @@ -package mvm.rya.mongodb.dao; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Map; - -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.mongodb.MongoDBRdfConfiguration; - -import org.apache.commons.codec.binary.Hex; -import org.openrdf.model.Namespace; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; - -public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager { - - public class NamespaceImplementation implements Namespace { - - private String namespace; - private String prefix; - - public NamespaceImplementation(String namespace, String prefix) { - this.namespace = namespace; - this.prefix = prefix; - } - - @Override - public int compareTo(Namespace o) { - if (!namespace.equalsIgnoreCase(o.getName())) return namespace.compareTo(o.getName()); - if (!prefix.equalsIgnoreCase(o.getPrefix())) return prefix.compareTo(o.getPrefix()); - return 0; - } - - @Override - public String getName() { - return namespace; - } - - @Override - public String getPrefix() { - return prefix; - } - - } - - public class MongoCursorIteration implements - CloseableIteration { - private DBCursor cursor; - - public MongoCursorIteration(DBCursor cursor2) { - this.cursor = cursor2; - } - - @Override - public boolean hasNext() throws RyaDAOException { - return cursor.hasNext(); - } - - @Override - public Namespace next() throws RyaDAOException { - DBObject ns = cursor.next(); - Map values = ns.toMap(); - String namespace = (String) values.get(NAMESPACE); - String prefix = (String) values.get(PREFIX); - - Namespace temp = new NamespaceImplementation(namespace, prefix); - return temp; - } - - @Override - public void remove() throws RyaDAOException { - next(); - } - - @Override - public void close() throws RyaDAOException { - cursor.close(); - } - - } - - private static final String ID = "_id"; - private static final String PREFIX = "prefix"; - private static final String NAMESPACE = "namespace"; - private MongoDBRdfConfiguration conf; - private DBCollection nsColl; - - - public SimpleMongoDBNamespaceManager(DBCollection nameSpaceCollection) { - nsColl = nameSpaceCollection; - } - - @Override - public void createIndices(DBCollection coll){ - coll.createIndex(PREFIX); - coll.createIndex(NAMESPACE); - } - - - @Override - public void setConf(MongoDBRdfConfiguration paramC) { - this.conf = paramC; - } - - @Override - public MongoDBRdfConfiguration getConf() { - // TODO Auto-generated method stub - return conf; - } - - @Override - public void addNamespace(String prefix, String namespace) - throws RyaDAOException { - String id = prefix; - byte[] bytes = id.getBytes(); - try { - MessageDigest digest = MessageDigest.getInstance("SHA-1"); - bytes = digest.digest(bytes); - } catch (NoSuchAlgorithmException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes))) - .append(PREFIX, prefix) - .append(NAMESPACE, namespace); - 
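// Aside on the document being assembled above: its _id is the SHA-1 digest of
// the prefix, hex-encoded, so each prefix maps to a stable document id. A
// stand-alone equivalent using the same MessageDigest/Hex calls as the code:
//
//     byte[] digest = MessageDigest.getInstance("SHA-1").digest("ex".getBytes());
//     String id = new String(Hex.encodeHex(digest)); // _id for prefix "ex"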
nsColl.insert(doc); - - } - - @Override - public String getNamespace(String prefix) throws RyaDAOException { - DBObject query = new BasicDBObject().append(PREFIX, prefix); - DBCursor cursor = nsColl.find(query); - String nameSpace = prefix; - while (cursor.hasNext()){ - DBObject obj = cursor.next(); - nameSpace = (String) obj.toMap().get(NAMESPACE); - } - return nameSpace; - } - - @Override - public void removeNamespace(String prefix) throws RyaDAOException { - DBObject query = new BasicDBObject().append(PREFIX, prefix); - nsColl.remove(query); - } - - @Override - public CloseableIteration iterateNamespace() - throws RyaDAOException { - DBObject query = new BasicDBObject(); - DBCursor cursor = nsColl.find(query); - return new MongoCursorIteration(cursor); - } - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java deleted file mode 100644 index 24d16c1c2..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java +++ /dev/null @@ -1,152 +0,0 @@ -package mvm.rya.mongodb.dao; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Map; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.query.RyaQuery; - -import org.apache.commons.codec.binary.Hex; -import org.openrdf.model.impl.ValueFactoryImpl; - -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; - -public class SimpleMongoDBStorageStrategy implements MongoDBStorageStrategy { - - private static final String ID = "_id"; - private static final String OBJECT_TYPE = "objectType"; - private static final String CONTEXT = "context"; - private static final String PREDICATE = "predicate"; - private static final String OBJECT = "object"; - private static final String SUBJECT = "subject"; - private ValueFactoryImpl factory = new ValueFactoryImpl(); - - - public SimpleMongoDBStorageStrategy() { - } - - @Override - public void createIndices(DBCollection coll){ - coll.createIndex("subject"); - coll.createIndex("predicate"); - BasicDBObject doc = new BasicDBObject(); - doc.put(SUBJECT, 1); - doc.put(PREDICATE, 1); - coll.createIndex(doc); - doc = new BasicDBObject(OBJECT, 1); - doc.put(OBJECT_TYPE, 1); - doc.put(PREDICATE, 1); - coll.createIndex(doc); - doc = new BasicDBObject(OBJECT, 1); - doc.put(OBJECT_TYPE, 1); - coll.createIndex(doc); - doc = new BasicDBObject(OBJECT, 1); - doc = new BasicDBObject(OBJECT_TYPE, 1); - doc.put(SUBJECT, 1); - coll.createIndex(doc); - } - - @Override - public DBObject getQuery(RyaStatement stmt) { - RyaURI subject = stmt.getSubject(); - RyaURI predicate = stmt.getPredicate(); - RyaType object = stmt.getObject(); - RyaURI context = stmt.getContext(); - BasicDBObject query = new BasicDBObject(); - if (subject != null){ - query.append(SUBJECT, subject.getData()); - } - if (object != null){ - query.append(OBJECT, object.getData()); - query.append(OBJECT_TYPE, object.getDataType().toString()); - } - if (predicate != null){ - query.append(PREDICATE, predicate.getData()); - } - if (context != null){ - query.append(CONTEXT, context.getData()); - } - - return query; - } - - @Override - public RyaStatement deserializeDBObject(DBObject queryResult) { - Map result = queryResult.toMap(); - String subject = (String) result.get(SUBJECT); - String object = (String) result.get(OBJECT); - String objectType = (String) result.get(OBJECT_TYPE); - String predicate = (String) result.get(PREDICATE); - String context = (String) result.get(CONTEXT); - RyaType objectRya = null; - if (objectType.equalsIgnoreCase("http://www.w3.org/2001/XMLSchema#anyURI")){ - objectRya = new RyaURI(object); - } - else { - objectRya = new RyaType(factory.createURI(objectType), object); - } - - if (!context.isEmpty()){ - return new RyaStatement(new RyaURI(subject), new RyaURI(predicate), objectRya, - new RyaURI(context)); - } - return new RyaStatement(new RyaURI(subject), new RyaURI(predicate), objectRya); - } - - @Override - public DBObject serialize(RyaStatement statement){ - String context = ""; - if (statement.getContext() != null){ - context = statement.getContext().getData(); - } - String id = statement.getSubject().getData() + " " + - statement.getPredicate().getData() + " " + statement.getObject().getData() + " " + context; - byte[] bytes = id.getBytes(); - try { - MessageDigest digest = MessageDigest.getInstance("SHA-1"); - bytes = digest.digest(bytes); - } catch (NoSuchAlgorithmException e) { - // TODO Auto-generated catch block - 
e.printStackTrace(); - } - BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes))) - .append(SUBJECT, statement.getSubject().getData()) - .append(PREDICATE, statement.getPredicate().getData()) - .append(OBJECT, statement.getObject().getData()) - .append(OBJECT_TYPE, statement.getObject().getDataType().toString()) - .append(CONTEXT, context); - return doc; - - } - - @Override - public DBObject getQuery(RyaQuery ryaQuery) { - return getQuery(ryaQuery.getQuery()); - } - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java deleted file mode 100644 index ba37ca179..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java +++ /dev/null @@ -1,57 +0,0 @@ -package mvm.rya.mongodb.iter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; - -public class NonCloseableRyaStatementCursorIterator implements Iterator { - - RyaStatementCursorIterator iterator; - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public RyaStatement next() { - return iterator.next(); - } - - public NonCloseableRyaStatementCursorIterator( - RyaStatementCursorIterator iterator) { - this.iterator = iterator; - } - - @Override - public void remove() { - try { - iterator.remove(); - } catch (RyaDAOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java deleted file mode 100644 index ce21ff724..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java +++ /dev/null @@ -1,108 +0,0 @@ -package mvm.rya.mongodb.iter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
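The SimpleMongoDBStorageStrategy above stores each triple flat (subject, predicate, object, objectType, context) under a SHA-1 _id, which is what lets getQuery() build partial-match documents. A hedged example of the query document produced for a subject-only pattern (values illustrative):

    MongoDBStorageStrategy strategy = new SimpleMongoDBStorageStrategy();
    DBObject q = strategy.getQuery(
            new RyaStatement(new RyaURI("http://example.com/s"), null, null));
    // q == { "subject" : "http://example.com/s" } — fields left null in the
    // pattern are omitted, so find() matches any predicate/object/context.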
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; - -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.mongodb.dao.MongoDBStorageStrategy; - -import org.openrdf.query.BindingSet; - -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; - -public class RyaStatementBindingSetCursorIterator implements CloseableIteration, RyaDAOException> { - - private DBCollection coll; - private Map rangeMap; - private Iterator queryIterator; - private Long maxResults; - private DBCursor currentCursor; - private BindingSet currentBindingSet; - private MongoDBStorageStrategy strategy; - - public RyaStatementBindingSetCursorIterator(DBCollection coll, - Map rangeMap, MongoDBStorageStrategy strategy) { - this.coll = coll; - this.rangeMap = rangeMap; - this.queryIterator = rangeMap.keySet().iterator(); - this.strategy = strategy; - } - - @Override - public boolean hasNext() { - if (!currentCursorIsValid()) { - findNextValidCursor(); - } - return currentCursorIsValid(); - } - - @Override - public Entry next() { - if (!currentCursorIsValid()) { - findNextValidCursor(); - } - if (currentCursorIsValid()) { - // convert to Rya Statement - DBObject queryResult = currentCursor.next(); - RyaStatement statement = strategy.deserializeDBObject(queryResult); - return new RdfCloudTripleStoreUtils.CustomEntry(statement, currentBindingSet); - } - return null; - } - - private void findNextValidCursor() { - while (queryIterator.hasNext()){ - DBObject currentQuery = queryIterator.next(); - currentCursor = coll.find(currentQuery); - currentBindingSet = rangeMap.get(currentQuery); - if (currentCursor.hasNext()) break; - } - } - - private boolean currentCursorIsValid() { - return (currentCursor != null) && currentCursor.hasNext(); - } - - - public void setMaxResults(Long maxResults) { - this.maxResults = maxResults; - } - - @Override - public void close() throws RyaDAOException { - // TODO don't know what to do here - } - - @Override - public void remove() throws RyaDAOException { - next(); - } - -} diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java deleted file mode 100644 index 83bd2d418..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java +++ /dev/null @@ -1,67 +0,0 @@ -package mvm.rya.mongodb.iter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java deleted file mode 100644 index 83bd2d418..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java +++ /dev/null @@ -1,67 +0,0 @@ -package mvm.rya.mongodb.iter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Map.Entry; -import java.util.Set; - -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; - -import org.calrissian.mango.collect.CloseableIterable; -import org.calrissian.mango.collect.CloseableIterator; -import org.openrdf.query.BindingSet; - -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; - -public class RyaStatementCursorIterable implements CloseableIterable<RyaStatement> { - - - private NonCloseableRyaStatementCursorIterator iterator; - - public RyaStatementCursorIterable(NonCloseableRyaStatementCursorIterator iterator) { - this.iterator = iterator; - } - - @Override - public Iterator<RyaStatement> iterator() { - return iterator; - } - - @Override - public void closeQuietly() { - //TODO don't know what to do here - } - - @Override - public void close() throws IOException { - // TODO don't know what to do here - } - -}
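Because this class implements the Calrissian mango CloseableIterable, callers can use it in an enhanced for loop and release it with closeQuietly(). A minimal consumption sketch (class and method names are illustrative):

    import mvm.rya.api.domain.RyaStatement;
    import org.calrissian.mango.collect.CloseableIterable;

    public class IterableDrainSketch {
        // Iterates all statements, then releases the iterable without
        // forcing callers to handle IOException.
        public static void printAll(CloseableIterable<RyaStatement> statements) {
            try {
                for (RyaStatement statement : statements) {
                    System.out.println(statement);
                }
            } finally {
                statements.closeQuietly();
            }
        }
    }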
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java deleted file mode 100644 index 8df2c60ce..000000000 --- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java +++ /dev/null @@ -1,104 +0,0 @@ -package mvm.rya.mongodb.iter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Iterator; -import java.util.Map.Entry; -import java.util.Set; - -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.mongodb.dao.MongoDBStorageStrategy; - -import org.calrissian.mango.collect.CloseableIterable; -import org.openrdf.query.BindingSet; - -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; - -public class RyaStatementCursorIterator implements CloseableIteration<RyaStatement, RyaDAOException> { - - private DBCollection coll; - private Iterator<DBObject> queryIterator; - private DBCursor currentCursor; - private MongoDBStorageStrategy strategy; - private Long maxResults; - - public RyaStatementCursorIterator(DBCollection coll, Set<DBObject> queries, MongoDBStorageStrategy strategy) { - this.coll = coll; - this.queryIterator = queries.iterator(); - this.strategy = strategy; - } - - @Override - public boolean hasNext() { - if (!currentCursorIsValid()) { - findNextValidCursor(); - } - return currentCursorIsValid(); - } - - @Override - public RyaStatement next() { - if (!currentCursorIsValid()) { - findNextValidCursor(); - } - if (currentCursorIsValid()) { - // convert to Rya Statement - DBObject queryResult = currentCursor.next(); - RyaStatement statement = strategy.deserializeDBObject(queryResult); - return statement; - } - return null; - } - - private void findNextValidCursor() { - while (queryIterator.hasNext()){ - DBObject currentQuery = queryIterator.next(); - currentCursor = coll.find(currentQuery); - if (currentCursor.hasNext()) break; - } - } - - private boolean currentCursorIsValid() { - return (currentCursor != null) && currentCursor.hasNext(); - } - - - public void setMaxResults(Long maxResults) { - this.maxResults = maxResults; - } - - @Override - public void close() throws RyaDAOException { - // TODO don't know what to do here - } - - @Override - public void remove() throws RyaDAOException { - next(); - } - -} diff --git a/dao/pom.xml b/dao/pom.xml deleted file mode 100644 index 604b30cd3..000000000 --- a/dao/pom.xml +++ /dev/null @@ -1,39 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.rya</groupId> - <artifactId>rya-project</artifactId> - <version>3.2.10-SNAPSHOT</version> - </parent> - - <artifactId>rya.dao</artifactId> - <name>Apache Rya DAO Projects</name> - - <packaging>pom</packaging> - - <modules> - <module>accumulo.rya</module> - <module>mongodb.rya</module> - </modules> -</project> diff --git a/extras/indexing/pom.xml b/extras/indexing/pom.xml deleted file mode 100644 index f484916ee..000000000 --- a/extras/indexing/pom.xml +++ /dev/null @@ -1,128 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.rya</groupId> - <artifactId>rya.extras</artifactId> - <version>3.2.10-SNAPSHOT</version> - </parent> - - <artifactId>rya.indexing</artifactId> - <name>Apache Rya Secondary Indexing</name> - - <dependencies> - <dependency> - <groupId>org.apache.rya</groupId> - <artifactId>rya.sail</artifactId> - <exclusions> - <exclusion> - <groupId>hsqldb</groupId> - <artifactId>hsqldb</artifactId> - </exclusion> - </exclusions> - </dependency> - - <dependency> - <groupId>org.apache.rya</groupId> - <artifactId>accumulo.rya</artifactId> - </dependency> - <dependency> - <groupId>org.apache.rya</groupId> - <artifactId>mongodb.rya</artifactId> - </dependency> - <dependency> - <groupId>org.apache.rya</groupId> - <artifactId>rya.prospector</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.lucene</groupId> - <artifactId>lucene-core</artifactId> - </dependency> - <dependency> - <groupId>org.apache.lucene</groupId> - <artifactId>lucene-analyzers</artifactId> - </dependency> - - <dependency> - <groupId>commons-codec</groupId> - <artifactId>commons-codec</artifactId> - </dependency> - - <dependency> - <groupId>org.locationtech.geomesa</groupId> - <artifactId>geomesa-accumulo-datastore</artifactId> - </dependency> - - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - </dependencies> - - <build> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-shade-plugin</artifactId> - <configuration> - <shadedArtifactAttached>true</shadedArtifactAttached> - <shadedClassifierName>map-reduce</shadedClassifierName> - </configuration> - <executions> - <execution> - <id>accumulo-server</id> - <phase>package</phase> - <goals> - <goal>shade</goal> - </goals> - <configuration> - <shadedArtifactAttached>true</shadedArtifactAttached> - <shadedClassifierName>accumulo-server</shadedClassifierName> - <artifactSet> - <excludes> - <exclude>org.locationtech.geomesa:*</exclude> - <exclude>scala:*</exclude> - <exclude>org.apache.accumulo:*</exclude> - <exclude>org.apache.thrift:*</exclude> - <exclude>org.apache.hadoop:*</exclude> - <exclude>org.apache.zookeeper:*</exclude> - </excludes> - </artifactSet> - </configuration> - </execution> - </executions> - </plugin> - </plugins> - </build> -</project> diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java deleted file mode 100644 index
fefd65141..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java +++ /dev/null @@ -1,31 +0,0 @@ -package mvm.rya.accumulo.documentIndex; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -public class DocIndexIteratorUtil { - - - - public static final String DOC_ID_INDEX_DELIM = "\u001D" + "\u001E"; - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java deleted file mode 100644 index ad38b2bcb..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java +++ /dev/null @@ -1,850 +0,0 @@ -package mvm.rya.accumulo.documentIndex; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.data.ArrayByteSequence; -import org.apache.accumulo.core.data.ByteSequence; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.PartialKey; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.iterators.IteratorEnvironment; -import org.apache.accumulo.core.iterators.SortedKeyValueIterator; -import org.apache.accumulo.core.tabletserver.thrift.TabletClientService; -import org.apache.accumulo.core.util.TextUtil; -import org.apache.commons.codec.binary.Base64; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; - -/** - * This iterator facilitates document-partitioned indexing. It involves grouping a set of documents together and indexing those documents into a single row of - * an Accumulo table. This allows a tablet server to perform boolean AND operations on terms in the index. 
- * - * The table structure should have the following form: - * - * row: shardID, colfam: term, colqual: docID - * - * When you configure this iterator with a set of terms (column families), it will return only the docIDs that appear with all of the specified terms. The - * result will have an empty column family, as follows: - * - * row: shardID, colfam: (empty), colqual: docID - * - * This iterator is commonly used with BatchScanner or AccumuloInputFormat, to parallelize the search over all shardIDs. - * - * This iterator will *ignore* any columnFamilies passed to {@link #seek(Range, Collection, boolean)} as it performs intersections over terms. Extending classes - * should override the {@link TermSource#seekColfams} in their implementation's {@link #init(SortedKeyValueIterator, Map, IteratorEnvironment)} method. - * - * README.shard in docs/examples shows an example of using the IntersectingIterator. - */ -public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator<Key,Value> { - - - - - protected Text nullText = new Text(); - - protected Text getRow(Key key) { - return key.getRow(); - } - - protected Text getTerm(Key key) { - return key.getColumnFamily(); - } - - protected Text getTermCond(Key key) { - return key.getColumnQualifier(); - } - - protected Key buildKey(Text row, TextColumn column) { - return new Key(row, (column.getColumnFamily() == null) ? nullText: column.getColumnFamily(), column.getColumnQualifier()); - } - - protected Key buildKey(Text row, Text term) { - return new Key(row, (term == null) ? nullText : term); - } - - protected Key buildKey(Text row, Text term, Text termCond) { - return new Key(row, (term == null) ? nullText : term, termCond); - } - - protected Key buildFollowRowKey(Key key, Text term, Text termCond) { - return new Key(getRow(key.followingKey(PartialKey.ROW)),(term == null) ? nullText : term, termCond); - } - - protected static final Logger log = Logger.getLogger(DocumentIndexIntersectingIterator.class); - - public static class TermSource { - public SortedKeyValueIterator<Key,Value> iter; - public Text term; - public Text termCond; - public Collection<ByteSequence> seekColfams; - public TextColumn column; - public boolean isPrefix; - public Key top ; - public Key next ; - public Text currentCQ; - private boolean seeked = false; - - public TermSource(TermSource other) { - - this.iter = other.iter; - this.term = other.term; - this.termCond = other.termCond; - this.seekColfams = other.seekColfams; - this.column = other.column; - this.top = other.top; - this.next = other.next; - this.currentCQ = other.currentCQ; - this.isPrefix = other.isPrefix; - } - - - public TermSource(SortedKeyValueIterator<Key,Value> iter, TextColumn column) { - - this.iter = iter; - this.column = column; - this.term = column.getColumnFamily(); - this.termCond = column.getColumnQualifier(); - this.currentCQ = new Text(emptyByteArray); - this.seekColfams = Collections.<ByteSequence> singletonList(new ArrayByteSequence(term - .getBytes(), 0, term.getLength())); - - } - - - - public void seek(Range r) throws IOException { - - if (seeked) { - - if (next != null && !r.beforeStartKey(next)) { - if (next.getColumnFamily().equals(term)) { - this.updateTop(); - } - } else if (iter.hasTop()) { - iter.seek(r, seekColfams, true); - this.updateTopNext(); - } else { - top = null; - next = null; - - } - } else { - - iter.seek(r, seekColfams, true); - this.updateTopNext(); - seeked = true; - } - - } - - - public void next() throws IOException { - - this.updateTop(); - } - - public void updateTop() throws IOException { - - top = next; - if (next != null) { - iter.next(); - if (iter.hasTop()) { - next = iter.getTopKey(); - } else { - next = null; - } - } - - } - - public void updateTopNext() throws IOException { - - if (iter.hasTop()) { - top = iter.getTopKey(); - } else { - top = null; - next = null; - return; - } - - iter.next(); - - if(iter.hasTop()) { - next = iter.getTopKey(); - } else { - next = null; - } - } - - public boolean hasTop() { - return top != null; - } - - - public String getTermString() { - return (this.term == null) ? new String("Iterator") : this.term.toString(); - } - } - - TermSource[] sources; - int sourcesCount = 0; - Range overallRange; - - // query-time settings - protected Text currentRow = null; - protected Text currentTermCond = new Text(emptyByteArray); - static final byte[] emptyByteArray = new byte[0]; - - protected Key topKey = null; - protected Value value = new Value(emptyByteArray); - protected String ctxt = null; - protected boolean hasContext = false; - protected boolean termCondSet = false; - - public DocumentIndexIntersectingIterator() {} - - @Override - public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) { - //log.info("Calling deep copy on " + this); - return new DocumentIndexIntersectingIterator(this, env); - } - - private DocumentIndexIntersectingIterator(DocumentIndexIntersectingIterator other, IteratorEnvironment env) { - if (other.sources != null) { - sourcesCount = other.sourcesCount; - sources = new TermSource[sourcesCount]; - for (int i = 0; i < sourcesCount; i++) { - sources[i] = new TermSource(other.sources[i].iter.deepCopy(env), other.sources[i].column); - } - } - } - - @Override - public Key getTopKey() { - - return topKey; - } - - @Override - public Value getTopValue() { - // we don't really care about values - return value; - } - - @Override - public boolean hasTop() { - return currentRow != null; - } - - // precondition: currentRow is not null - private boolean seekOneSource(int sourceID) throws IOException { - // find the next key in the appropriate column family that is at or - // beyond the cursor (currentRow, currentCQ) - // advance the cursor if this source goes beyond it - // return whether we advanced the cursor - - // within this loop progress must be made in one of the following forms: - // - currentRow or currentCQ must be increased - // - the given source must advance its iterator - // this loop will end when any of the following criteria are met - // - the iterator for the given source is pointing to the key - // (currentRow, columnFamilies[sourceID], currentCQ) - // - the given source is out of data and currentRow is set to null - // - the given source has advanced beyond the endRow and currentRow is - // set to null - boolean advancedCursor = false; - - - - - - while (true) { - -// if(currentRow.toString().equals(s)) { -// log.info("Source id is " + sourceID); -// if (sources[sourceID].top != null) {
-// log.info("Top row is " + getRow(sources[sourceID].top)); -// log.info("Top cq is " + getTermCond(sources[sourceID].top)); -// } -// if (sources[sourceID].next != null) { -// log.info("Next row is " + getRow(sources[sourceID].next)); -// log.info("Next termCond is " + getTermCond(sources[sourceID].next)); -// } -// } - - if (sources[sourceID].hasTop() == false) { - currentRow = null; - // setting currentRow to null counts as advancing the cursor - return true; - } - // check if we're past the end key - int endCompare = -1; - // we should compare the row to the end of the range - - if (overallRange.getEndKey() != null) { - endCompare = overallRange.getEndKey().getRow().compareTo(sources[sourceID].top.getRow()); - if ((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0) { - currentRow = null; - // setting currentRow to null counts as advancing the cursor - return true; - } - } - - - - int rowCompare = currentRow.compareTo(getRow(sources[sourceID].top)); - // check if this source is already at or beyond currentRow - // if not, then seek to at least the current row - - - - if (rowCompare > 0) { - // seek to at least the currentRow - Key seekKey = buildKey(currentRow, sources[sourceID].term); - sources[sourceID].seek(new Range(seekKey, true, null, false)); - - continue; - } - // check if this source has gone beyond currentRow - // if so, advance currentRow - if (rowCompare < 0) { - currentRow.set(getRow(sources[sourceID].top)); - //log.info("Current row is " + currentRow); - advancedCursor = true; - continue; - } - // we have verified that the current source is positioned in - // currentRow - // now we must make sure we're in the right columnFamily in the - // current row - // Note: Iterators are auto-magically set to the correct - // columnFamily - - if (sources[sourceID].column.isValid()) { - - boolean isPrefix = false; - boolean contextEqual = false; - String tempContext = ""; - - int termCompare; - - String[] cQ = getTermCond(sources[sourceID].top).toString().split("\u0000"); - tempContext = cQ[0]; - - if (!hasContext && ctxt == null) { - ctxt = cQ[0]; - } - - contextEqual = ctxt.equals(cQ[0]); - - String s1 = sources[sourceID].termCond.toString(); - String s2 = cQ[1] + "\u0000" + cQ[2]; - - if (sources[sourceID].isPrefix) { - isPrefix = s2.startsWith(s1 + "\u0000"); - } else { - isPrefix = s2.startsWith(s1); - } - - termCompare = (contextEqual && isPrefix) ? 
0 : (ctxt + "\u0000" + s1).compareTo(cQ[0] + "\u0000" + s2); - - // if(currentRow.toString().equals(s)) { - // log.info("Term compare is " + termCompare); - // } - - // check if this source is already on the right columnFamily - // if not, then seek forwards to the right columnFamily - if (termCompare > 0) { - Key seekKey = buildKey(currentRow, sources[sourceID].term, new Text(ctxt + - "\u0000" + sources[sourceID].termCond.toString())); - sources[sourceID].seek(new Range(seekKey, true, null, false)); - - continue; - } - // check if this source is beyond the right columnFamily - // if so, then seek to the next row - if (termCompare < 0) { - // we're out of entries in the current row, so seek to the - // next one - - if (endCompare == 0) { - // we're done - currentRow = null; - // setting currentRow to null counts as advancing the - // cursor - return true; - } - - - - //advance to next row if context set - all entries in given row exhausted - if (hasContext || tempContext.length() == 0) { - Key seekKey = buildFollowRowKey(sources[sourceID].top, sources[sourceID].term, - new Text(ctxt + "\u0000" + sources[sourceID].termCond.toString())); - sources[sourceID].seek(new Range(seekKey, true, null, false)); - } else { - - if(contextEqual && !isPrefix) { - Key seekKey = buildKey(currentRow, sources[sourceID].term, new Text(ctxt + "\u0001")); - sources[sourceID].seek(new Range(seekKey, true, null, false)); - if(sources[sourceID].top != null) { - ctxt = getTermCond(sources[sourceID].top).toString().split("\u0000")[0]; - } - } else { - Key seekKey = buildKey(currentRow, sources[sourceID].term, new Text(tempContext + - "\u0000" + sources[sourceID].termCond.toString())); - sources[sourceID].seek(new Range(seekKey, true, null, false)); - if(sources[sourceID].top != null) { - ctxt = getTermCond(sources[sourceID].top).toString().split("\u0000")[0]; - } - } - - } - - -// if(currentRow.toString().equals(s)) { -// log.info("current term cond is " + currentTermCond); -// -// } - - - continue; - } - } - - - - - - - - - - - //set currentTermCond -- gets appended to end of currentKey column qualifier - //used to determine which term iterator to advance when a new iterator is created - - sources[sourceID].currentCQ.set(getTermCond(sources[sourceID].top)); - - if (sources[sourceID].next != null) { - - //is hasContext, only consider sourceID with next having designated context - //otherwise don't set currentTermCond - if (!termCondSet && hasContext) { - if (sources[sourceID].next.getRow().equals(currentRow) - && sources[sourceID].next.getColumnQualifier().toString() - .startsWith(ctxt + "\u0000" + sources[sourceID].termCond.toString())) { - currentTermCond.set(new Text(Integer.toString(sourceID))); - termCondSet = true; - } - } else if(!termCondSet){ - String[] cq = getTermCond(sources[sourceID].next).toString().split("\u0000"); - - //set currentTermCond with preference given to sourceID having next with same context - //otherwise set currentTermCond sourceID with next having termCond as prefix - if (sources[sourceID].next.getRow().equals(currentRow)) { - if (sources[sourceID].next.getColumnQualifier().toString() - .startsWith(ctxt + "\u0000" + sources[sourceID].termCond.toString())) { - currentTermCond.set(new Text(Integer.toString(sourceID))); - termCondSet = true; - } else if ((cq[1] + "\u0000" + cq[2]).startsWith(sources[sourceID].termCond.toString())) { - currentTermCond.set(new Text(Integer.toString(sourceID))); - } - } - } - } - - - break; - } - - return advancedCursor; - } - - @Override - public void next() 
throws IOException { - if (currentRow == null) { - return; - } - - - - if(currentTermCond.getLength() != 0) { - - int id = Integer.parseInt(currentTermCond.toString()); - - sources[id].next(); - currentTermCond.set(emptyByteArray); - termCondSet = false; - if(sources[id].top != null && !hasContext) { - ctxt = getTermCond(sources[id].top).toString().split("\u0000")[0]; - } - advanceToIntersection(); - return; - } - - sources[0].next(); - if(sources[0].top != null && !hasContext) { - ctxt = getTermCond(sources[0].top).toString().split("\u0000")[0]; - } - advanceToIntersection(); - } - - protected void advanceToIntersection() throws IOException { - boolean cursorChanged = true; - while (cursorChanged) { - // seek all of the sources to at least the highest seen column qualifier in the current row - cursorChanged = false; - for (int i = 0; i < sourcesCount; i++) { -// log.info("New sourceID is " + i); - if (currentRow == null) { - topKey = null; - return; - } - if (seekOneSource(i)) { - currentTermCond.set(emptyByteArray); - termCondSet = false; - cursorChanged = true; - break; - } - } - } - String cq = ""; - for(int i = 0; i < sourcesCount; i++) { - cq = cq + sources[i].currentCQ.toString() + DocIndexIteratorUtil.DOC_ID_INDEX_DELIM; - } - - if (currentTermCond.getLength() == 0) { - topKey = buildKey(currentRow, nullText, new Text(cq + -1)); - } else { - topKey = buildKey(currentRow, nullText, new Text(cq + currentTermCond.toString())); - } - } - - public static String stringTopKey(SortedKeyValueIterator<Key,Value> iter) { - if (iter.hasTop()) - return iter.getTopKey().toString(); - return ""; - } - - private static final String columnOptionName = "columns"; - private static final String columnPrefix = "prefixes"; - private static final String context = "context"; - - - - protected static String encodeColumns(TextColumn[] columns) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < columns.length; i++) { - sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnFamily())))); - sb.append('\n'); - sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnQualifier())))); - sb.append('\u0001'); - } - return sb.toString(); - } - - - - protected static TextColumn[] decodeColumns(String columns) { - String[] columnStrings = columns.split("\u0001"); - TextColumn[] columnTexts = new TextColumn[columnStrings.length]; - for (int i = 0; i < columnStrings.length; i++) { - String[] columnComponents = columnStrings[i].split("\n"); - columnTexts[i] = new TextColumn(new Text(Base64.decodeBase64(columnComponents[0].getBytes())), - new Text(Base64.decodeBase64(columnComponents[1].getBytes()))); - } - return columnTexts; - } - - - - - - /** - * @param context - * @return encoded context - */ - protected static String encodeContext(String context) { - - return new String(Base64.encodeBase64(context.getBytes())); - } - - - - /** - * @param context - * @return decoded context - */ - protected static String decodeContext(String context) { - - if (context == null) { - return null; - } else { - return new String(Base64.decodeBase64(context.getBytes())); - } - } - - - - - - protected static String encodeBooleans(boolean[] prefixes) { - byte[] bytes = new byte[prefixes.length]; - for (int i = 0; i < prefixes.length; i++) { - if (prefixes[i]) - bytes[i] = 1; - else - bytes[i] = 0; - } - return new String(Base64.encodeBase64(bytes)); - } - - /** - * @param prefixes - * @return decoded flags - */ - protected static boolean[] decodeBooleans(String prefixes) { - // return null if there were no flags - if (prefixes == null) - return null; - - byte[] bytes = Base64.decodeBase64(prefixes.getBytes()); - boolean[] bFlags = new boolean[bytes.length]; - for (int i = 0; i < bytes.length; i++) { - if (bytes[i] == 1) - bFlags[i] = true; - else - bFlags[i] = false; - } - return bFlags; - } - - - - - - - - - @Override - public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException { - TextColumn[] terms = decodeColumns(options.get(columnOptionName)); - boolean[] prefixes = decodeBooleans(options.get(columnPrefix)); - ctxt = decodeContext(options.get(context)); - - if(ctxt != null) { - hasContext = true; - } - - - - if (terms.length < 2) { - throw new IllegalArgumentException("IntersectionIterator requires two or more column families"); - } - - sources = new TermSource[terms.length]; - sources[0] = new TermSource(source, terms[0]); - for (int i = 1; i < terms.length; i++) { - //log.info("For decoded column " + i + " column family is " + terms[i].getColumnFamily() + " and qualifier is " + terms[i].getColumnQualifier()); - sources[i] = new TermSource(source.deepCopy(env), terms[i]); - sources[i].isPrefix = prefixes[i]; - } - sourcesCount = terms.length; - } - - @Override - public void seek(Range range, Collection<ByteSequence> seekColumnFamilies, boolean inclusive) throws IOException { - overallRange = new Range(range); - currentRow = new Text(); - currentTermCond.set(emptyByteArray); - termCondSet = false; - - - -// log.info("Calling seek with range " + range); - - // seek each of the sources to the right column family within the row - // given by key - - Key sourceKey; - - if (rangeCqValid(range)) { - - String[] cqInfo = cqParser(range.getStartKey().getColumnQualifier()); - int id = Integer.parseInt(cqInfo[1]); - - - - if (id >= 0) { - for (int i = 0; i < sourcesCount; i++) { - - if (i == id) { - sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term, new Text(cqInfo[0])); - sources[i].seek(new Range(sourceKey, true, null, false)); - sources[i].next(); - if(!hasContext && sources[i].hasTop()) { - ctxt = getTermCond(sources[i].top).toString().split("\u0000")[0]; - } - } else { - sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term); - sources[i].seek(new Range(sourceKey, true, null, false)); - } - } - } else { - - - for (int i = 0; i < sourcesCount; i++) { - sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term, range.getStartKey() - .getColumnQualifier()); - sources[i].seek(new Range(sourceKey, true, null, false)); - } - } - - - } else { - -// log.info("Range is invalid."); - for (int i = 0; i < sourcesCount; i++) { - - if (range.getStartKey() != null) { - - sourceKey = buildKey(getRow(range.getStartKey()), sources[i].term); - - // Seek only to the term for this source as a column family - sources[i].seek(new Range(sourceKey, true, null, false)); - } else { - // Seek only to the term for this source as a column family - - sources[i].seek(range); - } - } - } - - advanceToIntersection(); - - }
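The seek() and cqParser() logic above and below works against a composite column qualifier: each source's current qualifier joined by DOC_ID_INDEX_DELIM, with the index of the term source to advance (or -1) appended last. A small sketch of that layout, splitting it the same way cqParser() does; the context, term, and docID values are illustrative:

    import mvm.rya.accumulo.documentIndex.DocIndexIteratorUtil;

    public class CompositeCqSketch {
        public static void main(String[] args) {
            // Two per-source qualifiers plus a trailing source index of 1.
            String cq = "ctx\u0000term1\u0000doc42" + DocIndexIteratorUtil.DOC_ID_INDEX_DELIM
                    + "ctx\u0000term2\u0000doc42" + DocIndexIteratorUtil.DOC_ID_INDEX_DELIM
                    + "1";
            String[] parts = cq.split(DocIndexIteratorUtil.DOC_ID_INDEX_DELIM);
            int id = Integer.parseInt(parts[parts.length - 1]); // 1 -> advance source 1
            System.out.println(parts[id] + " / source " + id);
        }
    }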
- - private String[] cqParser(Text cq) { - - String cQ = cq.toString(); - String[] cqComponents = cQ.split(DocIndexIteratorUtil.DOC_ID_INDEX_DELIM); - int id = -1; - String[] valPos = new String[2]; - - - - - if(cqComponents.length > 1) { - id = Integer.parseInt(cqComponents[cqComponents.length-1]); - if (id >= 0) { - valPos[0] = cqComponents[id].toString(); - valPos[1] = "" + id; - } else { - valPos[0] = cqComponents[0].toString(); - valPos[1] = "" + id; - } - } else { - valPos[0] = cq.toString(); - valPos[1] = "" + -1; - } - - return valPos; - - } - - - private boolean rangeCqValid(Range range) { - return (range.getStartKey() != null) && (range.getStartKey().getColumnQualifier() != null); - } - - - - public void addSource(SortedKeyValueIterator<Key,Value> source, IteratorEnvironment env, TextColumn column) { - // Check if we have space for the added Source - if (sources == null) { - sources = new TermSource[1]; - } else { - // allocate space for node, and copy current tree. - // TODO: Should we change this to an ArrayList so that we can just add() ? - ACCUMULO-1309 - TermSource[] localSources = new TermSource[sources.length + 1]; - int currSource = 0; - for (TermSource myTerm : sources) { - // TODO: Do I need to call new here? or can I just re-use the term? - ACCUMULO-1309 - localSources[currSource] = new TermSource(myTerm); - currSource++; - } - sources = localSources; - } - sources[sourcesCount] = new TermSource(source.deepCopy(env), column); - sourcesCount++; - } - - /** - * Encode the columns to be used when iterating. - * - * @param cfg - * @param columns - */ - public static void setColumnFamilies(IteratorSetting cfg, TextColumn[] columns) { - if (columns.length < 2) - throw new IllegalArgumentException("Must supply at least two terms to intersect"); - - boolean[] prefix = new boolean[columns.length]; - - for(int i = 0; i < columns.length; i++) { - prefix[i] = columns[i].isPrefix(); - } - - - - cfg.addOption(DocumentIndexIntersectingIterator.columnPrefix, DocumentIndexIntersectingIterator.encodeBooleans(prefix)); - cfg.addOption(DocumentIndexIntersectingIterator.columnOptionName, DocumentIndexIntersectingIterator.encodeColumns(columns)); - } - - - - - - public static void setContext(IteratorSetting cfg, String context) { - - cfg.addOption(DocumentIndexIntersectingIterator.context, DocumentIndexIntersectingIterator.encodeContext(context)); - - } - - - - - - - - - - - - - -}
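The javadoc near the top of this file says the iterator is commonly used with a BatchScanner. A minimal sketch of wiring it up that way, assuming an existing Accumulo Connector and a document-partitioned table laid out as the javadoc describes; the table name, priority, and term values are illustrative:

    import mvm.rya.accumulo.documentIndex.DocumentIndexIntersectingIterator;
    import mvm.rya.accumulo.documentIndex.TextColumn;
    import org.apache.accumulo.core.client.BatchScanner;
    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class DocIndexScanSketch {
        public static BatchScanner openIntersectingScan(Connector conn) throws Exception {
            BatchScanner scanner = conn.createBatchScanner("doc_partitioned_index",
                    new Authorizations(), 10);
            // setColumnFamilies() requires at least two terms to intersect.
            TextColumn[] terms = new TextColumn[] {
                new TextColumn(new Text("predicate"), new Text("http://example.org/worksFor")),
                new TextColumn(new Text("predicate"), new Text("http://example.org/livesIn"))
            };
            IteratorSetting is = new IteratorSetting(30, "ii",
                    DocumentIndexIntersectingIterator.class);
            DocumentIndexIntersectingIterator.setColumnFamilies(is, terms);
            scanner.addScanIterator(is);
            return scanner;
        }
    }

diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java deleted file mode 100644 index 661f62b56..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java +++ /dev/null @@ -1,108 +0,0 @@ -package mvm.rya.accumulo.documentIndex; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.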
- */ - - -import org.apache.hadoop.io.Text; - -public class TextColumn { - - - private Text columnFamily; - private Text columnQualifier; - private boolean isPrefix = false; - - - - public TextColumn(Text columnFamily, Text columnQualifier) { - this.columnFamily = columnFamily; - this.columnQualifier = columnQualifier; - } - - - public TextColumn(TextColumn other) { - - this.columnFamily = new Text(other.columnFamily); - this.columnQualifier = new Text(other.columnQualifier); - this.isPrefix = other.isPrefix; - - } - - - public Text getColumnFamily() { - return columnFamily; - } - - - public boolean isPrefix() { - return isPrefix; - } - - - public void setIsPrefix(boolean isPrefix) { - this.isPrefix = isPrefix; - } - - - public boolean isValid() { - return (columnFamily != null && columnQualifier != null); - } - - - - public Text getColumnQualifier() { - return columnQualifier; - } - - - public void setColumnFamily(Text cf) { - this.columnFamily = cf; - } - - public void setColumnQualifier(Text cq) { - this.columnQualifier = cq; - } - - public String toString() { - - return columnFamily.toString() + ", " + columnQualifier.toString() + ", prefix:" + isPrefix; - } - - @Override - public boolean equals(Object other) { - - if(other == null) { - return false; - } - - if(!(other instanceof TextColumn)) { - return false; - } - - TextColumn tc = (TextColumn) other; - - return this.columnFamily.equals(tc.columnFamily) && this.columnQualifier.equals(tc.columnQualifier) && this.isPrefix == tc.isPrefix; - - - - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java deleted file mode 100644 index 3d005cf58..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java +++ /dev/null @@ -1,70 +0,0 @@ -package mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - import info.aduna.iteration.CloseableIteration; - - import java.io.IOException; - import java.util.Set; - - import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; - import mvm.rya.api.domain.RyaStatement; - import mvm.rya.indexing.FreeTextIndexer; - import mvm.rya.indexing.StatementContraints; - - import org.apache.hadoop.conf.Configuration; - import org.openrdf.model.Statement; - import org.openrdf.model.URI; - import org.openrdf.query.QueryEvaluationException; - - public class NullFreeTextIndexer extends AbstractAccumuloIndexer implements FreeTextIndexer { - - @Override - public String getTableName() { - return null; - } - - @Override - public void storeStatement(RyaStatement statement) throws IOException { - } - - @Override - public Configuration getConf() { - return null; - } - - @Override - public void setConf(Configuration arg0) { - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryText(String query, StatementContraints contraints) - throws IOException { - return null; - } - - @Override - public Set<URI> getIndexablePredicates() { - return null; - } - -}
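These Null* indexers exist so a disabled index can be swapped in as a no-op rather than guarded with null checks; RyaOutputFormat further down in this patch selects them from boolean configuration flags. A distilled sketch of that selection pattern, with an illustrative flag name rather than the constant RyaOutputFormat actually uses:

    import mvm.rya.accumulo.mr.NullFreeTextIndexer;
    import mvm.rya.indexing.FreeTextIndexer;
    import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer;
    import org.apache.hadoop.conf.Configuration;

    public class IndexerSelectionSketch {
        // Falls back to the no-op indexer when free text indexing is off.
        public static FreeTextIndexer chooseFreeTextIndexer(Configuration conf) {
            if (!conf.getBoolean("rya.indexing.freetext.enable", true)) { // illustrative key
                return new NullFreeTextIndexer();
            }
            AccumuloFreeTextIndexer indexer = new AccumuloFreeTextIndexer();
            indexer.setConf(conf);
            return indexer;
        }
    }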
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java deleted file mode 100644 index b351c138b..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java +++ /dev/null @@ -1,121 +0,0 @@ -package mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.StatementContraints; - -import org.apache.hadoop.conf.Configuration; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import com.vividsolutions.jts.geom.Geometry; - -public class NullGeoIndexer extends AbstractAccumuloIndexer implements GeoIndexer { - - @Override - public String getTableName() { - - return null; - } - - @Override - public void storeStatement(RyaStatement statement) throws IOException { - - - } - - @Override - public Configuration getConf() { - - return null; - } - - @Override - public void setConf(Configuration arg0) { - - - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryEquals(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryIntersects(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryTouches(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryCrosses(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryWithin(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryContains(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(Geometry query, StatementContraints contraints) { - - return null; - } - - @Override - public Set<URI> getIndexablePredicates() { - - return null; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java deleted file mode 100644 index 153a3c346..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java +++ /dev/null @@ -1,154 +0,0 @@ -package mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Collection; -import java.util.Set; - -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.TemporalIndexer; -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; - -import org.apache.hadoop.conf.Configuration; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -/** - * A {@link TemporalIndexer} that does nothing; used when temporal indexing is disabled. - * - */ -public class NullTemporalIndexer extends AbstractAccumuloIndexer implements TemporalIndexer { - - @Override - public String getTableName() { - - return null; - } - - @Override - public void storeStatement(RyaStatement statement) throws IOException { - - - } - - @Override - public Configuration getConf() { - - return null; - } - - @Override - public void setConf(Configuration arg0) { - - - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantEqualsInstant(TemporalInstant queryInstant, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantBeforeInstant(TemporalInstant queryInstant, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantAfterInstant(TemporalInstant queryInstant, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantBeforeInterval(TemporalInterval givenInterval, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantAfterInterval(TemporalInterval givenInterval, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantInsideInterval(TemporalInterval givenInterval, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantHasBeginningInterval(TemporalInterval queryInterval, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryInstantHasEndInterval(TemporalInterval queryInterval, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryIntervalEquals(TemporalInterval query, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryIntervalBefore(TemporalInterval query, - StatementContraints contraints) throws QueryEvaluationException { - - return null; - } - - @Override - public CloseableIteration<Statement, QueryEvaluationException> queryIntervalAfter(TemporalInterval query, StatementContraints contraints) - throws QueryEvaluationException { - - return null; - } - - @Override - public Set<URI> getIndexablePredicates() { - - return null; - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java deleted file mode 100644 index 8a0d59908..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java +++ /dev/null @@ -1,329 +0,0 @@ -package
mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.Closeable; -import java.io.Flushable; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.indexing.FreeTextIndexer; -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.TemporalIndexer; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.StatementSerializer; -import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer; -import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer; -import mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Mutation; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.mapreduce.OutputCommitter; -import org.apache.hadoop.mapreduce.OutputFormat; -import org.apache.hadoop.mapreduce.RecordWriter; -import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.apache.log4j.Logger; -import org.geotools.feature.SchemaException; -import org.openrdf.model.Statement; - -/** - * Hadoop Map/Reduce class to use Rya, the {@link GeoIndexer}, the {@link FreeTextIndexer}, and the {@link TemporalIndexer} as the sink of {@link Statement} data - * wrapped in {@link StatementWritable} objects. This {@link OutputFormat} ignores the keys and only writes the values to Rya. - * - * The user must specify connection parameters for Rya, {@link GeoIndexer}, {@link FreeTextIndexer}, and {@link TemporalIndexer}.
- */ -public class RyaOutputFormat extends OutputFormat<Writable, StatementWritable> { - private static final Logger logger = Logger.getLogger(RyaOutputFormat.class); - - private static final String PREFIX = RyaOutputFormat.class.getSimpleName(); - private static final String MAX_MUTATION_BUFFER_SIZE = PREFIX + ".maxmemory"; - private static final String ENABLE_FREETEXT = PREFIX + ".freetext.enable"; - private static final String ENABLE_GEO = PREFIX + ".geo.enable"; - private static final String ENABLE_TEMPORAL = PREFIX + ".temporal.enable"; - - - @Override - public void checkOutputSpecs(JobContext jobContext) throws IOException, InterruptedException { - Configuration conf = jobContext.getConfiguration(); - - // make sure that all of the indexers can connect - getGeoIndexer(conf); - getFreeTextIndexer(conf); - getTemporalIndexer(conf); - getRyaIndexer(conf); - } - - @Override - public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { - // copied from AccumuloOutputFormat - return new NullOutputFormat().getOutputCommitter(context); - } - - @Override - public RecordWriter<Writable, StatementWritable> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException { - return new RyaRecordWriter(context); - } - - private static GeoIndexer getGeoIndexer(Configuration conf) throws IOException { - if (!conf.getBoolean(ENABLE_GEO, true)) { - return new NullGeoIndexer(); - } - - GeoMesaGeoIndexer geo = new GeoMesaGeoIndexer(); - geo.setConf(conf); - return geo; - - } - - private static FreeTextIndexer getFreeTextIndexer(Configuration conf) throws IOException { - if (!conf.getBoolean(ENABLE_FREETEXT, true)) { - return new NullFreeTextIndexer(); - } - - AccumuloFreeTextIndexer freeText = new AccumuloFreeTextIndexer(); - freeText.setConf(conf); - return freeText; - - } - - private static TemporalIndexer getTemporalIndexer(Configuration conf) throws IOException { - if (!conf.getBoolean(ENABLE_TEMPORAL, true)) { - return new NullTemporalIndexer(); - } - AccumuloTemporalIndexer temporal = new AccumuloTemporalIndexer(); - temporal.setConf(conf); - return temporal; - } - - private static AccumuloRyaDAO getRyaIndexer(Configuration conf) throws IOException { - try { - AccumuloRyaDAO ryaIndexer = new AccumuloRyaDAO(); - Connector conn = ConfigUtils.getConnector(conf); - ryaIndexer.setConnector(conn); - - AccumuloRdfConfiguration ryaConf = new AccumuloRdfConfiguration(); - - String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null); - if (tablePrefix != null) { - ryaConf.setTablePrefix(tablePrefix); - } - ryaConf.setDisplayQueryPlan(false); - ryaIndexer.setConf(ryaConf); - ryaIndexer.init(); - return ryaIndexer; - } catch (AccumuloException e) { - logger.error("Cannot create RyaIndexer", e); - throw new IOException(e); - } catch (AccumuloSecurityException e) { - logger.error("Cannot create RyaIndexer", e); - throw new IOException(e); - } catch (RyaDAOException e) { - logger.error("Cannot create RyaIndexer", e); - throw new IOException(e); - } - } - - public static class RyaRecordWriter extends RecordWriter<Writable, StatementWritable> implements Closeable, Flushable { - private static final Logger logger = Logger.getLogger(RyaRecordWriter.class); - - private FreeTextIndexer freeTextIndexer; - private GeoIndexer geoIndexer; - private TemporalIndexer temporalIndexer; - private AccumuloRyaDAO ryaIndexer; - - private static final long ONE_MEGABYTE = 1024L * 1024L; - private static final long AVE_STATEMENT_SIZE = 100L; - - private long bufferSizeLimit; - private long bufferCurrentSize = 0; - - private ArrayList<RyaStatement> buffer; - - public RyaRecordWriter(TaskAttemptContext context) throws IOException { - this(context.getConfiguration()); - } - - public RyaRecordWriter(Configuration conf) throws IOException { - // set up the buffer - bufferSizeLimit = conf.getLong(MAX_MUTATION_BUFFER_SIZE, ONE_MEGABYTE); - int bufferCapacity = (int) (bufferSizeLimit / AVE_STATEMENT_SIZE); - buffer = new ArrayList<RyaStatement>(bufferCapacity); - - // set up the indexers - freeTextIndexer = getFreeTextIndexer(conf); - geoIndexer = getGeoIndexer(conf); - temporalIndexer = getTemporalIndexer(conf); - ryaIndexer = getRyaIndexer(conf); - - // update fields used for metrics - startTime = System.currentTimeMillis(); - lastCommitFinishTime = startTime; - } - - @Override - public void flush() throws IOException { - flushBuffer(); - } - - @Override - public void close() throws IOException { - close(null); - } - - @Override - public void close(TaskAttemptContext paramTaskAttemptContext) throws IOException { - // close everything. log errors - try { - flush(); - } catch (IOException e) { - logger.error("Error flushing the buffer on RyaOutputFormat Close", e); - } - try { - if (geoIndexer != null) - geoIndexer.close(); - } catch (IOException e) { - logger.error("Error closing the geoIndexer on RyaOutputFormat Close", e); - } - try { - if (freeTextIndexer != null) - freeTextIndexer.close(); - } catch (IOException e) { - logger.error("Error closing the freetextIndexer on RyaOutputFormat Close", e); - } - try { - if (temporalIndexer != null) - temporalIndexer.close(); - } catch (IOException e) { - logger.error("Error closing the temporalIndexer on RyaOutputFormat Close", e); - } - try { - ryaIndexer.destroy(); - } catch (RyaDAOException e) { - logger.error("Error closing RyaDAO on RyaOutputFormat Close", e); - } - } - - public void write(Statement statement) throws IOException, InterruptedException { - write(null, new StatementWritable(statement)); - } - - @Override - public void write(Writable key, StatementWritable value) throws IOException, InterruptedException { - buffer.add(RdfToRyaConversions.convertStatement(value)); - - bufferCurrentSize += StatementSerializer.writeStatement(value).length(); - - if (bufferCurrentSize >= bufferSizeLimit) { - flushBuffer(); - } - } - - // fields for storing metrics - private long startTime = 0; - private long lastCommitFinishTime = 0; - private long totalCommitRecords = 0; - - private double totalReadDuration = 0; - private double totalWriteDuration = 0; - - private long commitCount = 0; - - private void flushBuffer() throws IOException { - totalCommitRecords += buffer.size(); - commitCount++; - - long startCommitTime = System.currentTimeMillis(); - - logger.info(String.format("(C-%d) Flushing buffer with %,d objects and %,d bytes", commitCount, buffer.size(), - bufferCurrentSize)); - - double readingDuration = (startCommitTime - lastCommitFinishTime) / 1000.; - totalReadDuration += readingDuration; - double currentReadRate = buffer.size() / readingDuration; - double totalReadRate = totalCommitRecords / totalReadDuration; - - // Print "reading" metrics - logger.info(String.format("(C-%d) (Reading) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, readingDuration, - currentReadRate, totalReadRate)); - - // write to geo - geoIndexer.storeStatements(buffer); - geoIndexer.flush(); - - // write to free text - freeTextIndexer.storeStatements(buffer); - freeTextIndexer.flush(); - - // write to temporal - temporalIndexer.storeStatements(buffer); - temporalIndexer.flush(); - - // write to rya - try {
ryaIndexer.add(buffer.iterator()); - } catch (RyaDAOException e) { - logger.error("Cannot write statements to Rya", e); - throw new IOException(e); - } - - lastCommitFinishTime = System.currentTimeMillis(); - - double writingDuration = (lastCommitFinishTime - startCommitTime) / 1000.; - totalWriteDuration += writingDuration; - double currentWriteRate = buffer.size() / writingDuration; - double totalWriteRate = totalCommitRecords / totalWriteDuration; - - // Print "writing" stats - logger.info(String.format("(C-%d) (Writing) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, writingDuration, - currentWriteRate, totalWriteRate)); - - double processDuration = writingDuration + readingDuration; - double totalProcessDuration = totalWriteDuration + totalReadDuration; - double currentProcessRate = buffer.size() / processDuration; - double totalProcessRate = totalCommitRecords / (totalProcessDuration); - - // Print "total" stats - logger.info(String.format("(C-%d) (Total) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, processDuration, - currentProcessRate, totalProcessRate)); - - // clear the buffer - buffer.clear(); - bufferCurrentSize = 0L; - } - } -}
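Per the class javadoc, callers point a MapReduce job at this OutputFormat and supply the Rya and indexer connection parameters through the job Configuration, which checkOutputSpecs() then verifies. A minimal job-setup sketch using the same pre-Hadoop-2 Job constructor this patch uses; the table prefix and job name are illustrative:

    import mvm.rya.accumulo.mr.RyaOutputFormat;
    import mvm.rya.accumulo.mr.utils.MRUtils;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class RyaSinkJobSketch {
        public static Job configureJob(Configuration conf) throws Exception {
            // Connection settings for Rya and the secondary indexers are read
            // from the Configuration by checkOutputSpecs().
            conf.set(MRUtils.TABLE_PREFIX_PROPERTY, "rya_"); // illustrative prefix
            Job job = new Job(conf, "Write statements to Rya");
            job.setOutputFormatClass(RyaOutputFormat.class);
            return job;
        }
    }

diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java deleted file mode 100644 index aefdf7470..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java +++ /dev/null @@ -1,86 +0,0 @@ -package mvm.rya.accumulo.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import mvm.rya.indexing.accumulo.StatementSerializer; - -import org.apache.hadoop.io.Writable; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; - -/** - * A {@link Writable} wrapper for {@link Statement} objects.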
- */ -@SuppressWarnings("serial") -public class StatementWritable implements Statement, Writable { - - private Statement statement; - - public StatementWritable(Statement statement) { - setStatement(statement); - } - - public void setStatement(Statement statement) { - this.statement = statement; - } - - public Statement getStatement() { - return statement; - } - - @Override - public void readFields(DataInput paramDataInput) throws IOException { - statement = StatementSerializer.readStatement(paramDataInput.readUTF()); - } - - @Override - public void write(DataOutput paramDataOutput) throws IOException { - paramDataOutput.writeUTF(StatementSerializer.writeStatement(statement)); - } - - @Override - public Resource getSubject() { - return statement.getSubject(); - } - - @Override - public URI getPredicate() { - return statement.getPredicate(); - } - - @Override - public Value getObject() { - return statement.getObject(); - } - - @Override - public Resource getContext() { - return statement.getContext(); - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java deleted file mode 100644 index ecc23547c..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java +++ /dev/null @@ -1,227 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static com.google.common.base.Preconditions.checkNotNull; - -import java.io.IOException; -import java.io.StringReader; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.indexing.FreeTextIndexer; -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer; -import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.log4j.Logger; -import org.geotools.feature.SchemaException; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.rio.ParserConfig; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; -import org.openrdf.rio.helpers.RDFHandlerBase; - -import com.google.common.base.Preconditions; - -/** - * Takes large N-Triples files and uses MapReduce to ingest them into the free-text and geo indexing tables. - */ -public class BulkNtripsInputToolIndexing extends Configured implements Tool { - - private String userName = null; - private String pwd = null; - private String instance = null; - private String zk = null; - - private String format = RDFFormat.NTRIPLES.getName(); - - @Override - public int run(final String[] args) throws Exception { - final Configuration conf = getConf(); - // conf - zk = conf.get(MRUtils.AC_ZK_PROP, zk); - instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance); - userName = conf.get(MRUtils.AC_USERNAME_PROP, userName); - pwd = conf.get(MRUtils.AC_PWD_PROP, pwd); - format = conf.get(MRUtils.FORMAT_PROP, format); - - String auths = conf.get(MRUtils.AC_CV_PROP, ""); - - conf.set(MRUtils.FORMAT_PROP, format); - Preconditions.checkNotNull(zk, MRUtils.AC_ZK_PROP + " not set"); - Preconditions.checkNotNull(instance, MRUtils.AC_INSTANCE_PROP + " not set"); - Preconditions.checkNotNull(userName, MRUtils.AC_USERNAME_PROP + " not set"); - Preconditions.checkNotNull(pwd, MRUtils.AC_PWD_PROP + " not set"); - - // map the config values to free text config values - conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, zk); - conf.set(ConfigUtils.CLOUDBASE_INSTANCE, instance); - conf.set(ConfigUtils.CLOUDBASE_USER, userName); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, pwd); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, auths); - - final String inputDir = args[0]; - - String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null); - Preconditions.checkNotNull(tablePrefix, MRUtils.TABLE_PREFIX_PROPERTY + " not
set"); - - String docTextTable = tablePrefix + "text"; - conf.set(ConfigUtils.FREE_TEXT_DOC_TABLENAME, docTextTable); - - String docTermTable = tablePrefix + "terms"; - conf.set(ConfigUtils.FREE_TEXT_TERM_TABLENAME, docTermTable); - - String geoTable = tablePrefix + "geo"; - conf.set(ConfigUtils.GEO_TABLENAME, geoTable); - - System.out.println("Loading data into tables[freetext, geo]"); - System.out.println("Loading data into tables[" + docTermTable + " " + docTextTable + " " + geoTable + "]"); - - Job job = new Job(new Configuration(conf), "Bulk Ingest load data into Indexing Tables"); - job.setJarByClass(this.getClass()); - - // setting long job - Configuration jobConf = job.getConfiguration(); - jobConf.setBoolean("mapred.map.tasks.speculative.execution", false); - jobConf.setBoolean("mapred.reduce.tasks.speculative.execution", false); - jobConf.set("io.sort.mb", jobConf.get("io.sort.mb", "256")); - jobConf.setBoolean("mapred.compress.map.output", true); - - job.setInputFormatClass(TextInputFormat.class); - - job.setMapperClass(ParseNtripsMapper.class); - - // I'm not actually going to write output. - job.setOutputFormatClass(NullOutputFormat.class); - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(Text.class); - job.setMapOutputKeyClass(Text.class); - job.setMapOutputValueClass(Text.class); - - TextInputFormat.setInputPaths(job, new Path(inputDir)); - - job.setNumReduceTasks(0); - - job.waitForCompletion(true); - - return 0; - } - - public static void main(String[] args) throws Exception { - ToolRunner.run(new Configuration(), new BulkNtripsInputToolIndexing(), args); - } - - public static class ParseNtripsMapper extends Mapper { - private static final Logger logger = Logger.getLogger(ParseNtripsMapper.class); - - public static final String TABLE_PROPERTY = "parsentripsmapper.table"; - - private RDFParser parser; - private FreeTextIndexer freeTextIndexer; - private GeoIndexer geoIndexer; - private String rdfFormat; - - @Override - protected void setup(final Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - - freeTextIndexer = new AccumuloFreeTextIndexer(); - freeTextIndexer.setConf(conf); - geoIndexer = new GeoMesaGeoIndexer(); - geoIndexer.setConf(conf); - final ValueFactory vf = new ValueFactoryImpl(); - - rdfFormat = conf.get(MRUtils.FORMAT_PROP); - checkNotNull(rdfFormat, "Rdf format cannot be null"); - - String namedGraphString = conf.get(MRUtils.NAMED_GRAPH_PROP); - checkNotNull(namedGraphString, MRUtils.NAMED_GRAPH_PROP + " cannot be null"); - - final Resource namedGraph = vf.createURI(namedGraphString); - - parser = Rio.createParser(RDFFormat.valueOf(rdfFormat)); - parser.setParserConfig(new ParserConfig(true, true, true, RDFParser.DatatypeHandling.VERIFY)); - parser.setRDFHandler(new RDFHandlerBase() { - - @Override - public void handleStatement(Statement statement) throws RDFHandlerException { - Statement contextStatement = new ContextStatementImpl(statement.getSubject(), statement - .getPredicate(), statement.getObject(), namedGraph); - try { - freeTextIndexer.storeStatement(RdfToRyaConversions.convertStatement(contextStatement)); - geoIndexer.storeStatement(RdfToRyaConversions.convertStatement(contextStatement)); - } catch (IOException e) { - logger.error("Error creating indexers", e); - } - } - }); - } - - @Override - public void map(LongWritable key, Text value, Context output) throws IOException, InterruptedException { - String rdf = value.toString(); - try { - 
parser.parse(new StringReader(rdf), ""); - } catch (RDFParseException e) { - System.out.println("Line[" + rdf + "] cannot be parsed as format[" + rdfFormat + "]. Exception[" + e.getMessage() - + "]"); - } catch (Exception e) { - logger.error("error during map", e); - throw new IOException("Exception occurred parsing triple[" + rdf + "]"); - } - } - - @Override - public void cleanup(Context context) { - IOUtils.closeStream(freeTextIndexer); - IOUtils.closeStream(geoIndexer); - } - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java deleted file mode 100644 index fb80804b6..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java +++ /dev/null @@ -1,243 +0,0 @@ -package mvm.rya.accumulo.mr.fileinput; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static com.google.common.base.Preconditions.checkNotNull; - -import java.io.IOException; -import java.io.StringReader; - -import mvm.rya.accumulo.mr.RyaOutputFormat; -import mvm.rya.accumulo.mr.StatementWritable; -import mvm.rya.accumulo.mr.utils.MRUtils; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.log4j.Logger; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.rio.ParserConfig; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; -import org.openrdf.rio.helpers.RDFHandlerBase; - -import com.google.common.base.Preconditions; - -/** - * Takes large N-Triples files and uses MapReduce to ingest them into Rya and the indexing tables. - */ -public class RyaBatchWriterInputTool extends Configured implements Tool { - private static final Logger logger = Logger.getLogger(RyaBatchWriterInputTool.class); - - @Override - public int run(final String[] args) throws Exception { - String userName = null; - String pwd = null; - String instance = null; - String zk =
null; - String format = null; - - final Configuration conf = getConf(); - // conf - zk = conf.get(MRUtils.AC_ZK_PROP, zk); - instance = conf.get(MRUtils.AC_INSTANCE_PROP, instance); - userName = conf.get(MRUtils.AC_USERNAME_PROP, userName); - pwd = conf.get(MRUtils.AC_PWD_PROP, pwd); - format = conf.get(MRUtils.FORMAT_PROP, RDFFormat.NTRIPLES.getName()); - - String auths = conf.get(MRUtils.AC_CV_PROP, ""); - - conf.set(MRUtils.FORMAT_PROP, format); - Preconditions.checkNotNull(zk, MRUtils.AC_ZK_PROP + " not set"); - Preconditions.checkNotNull(instance, MRUtils.AC_INSTANCE_PROP + " not set"); - Preconditions.checkNotNull(userName, MRUtils.AC_USERNAME_PROP + " not set"); - Preconditions.checkNotNull(pwd, MRUtils.AC_PWD_PROP + " not set"); - - // map the config values to free text config values - conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, zk); - conf.set(ConfigUtils.CLOUDBASE_INSTANCE, instance); - conf.set(ConfigUtils.CLOUDBASE_USER, userName); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, pwd); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, auths); - - final String inputDir = args[0]; - - String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null); - Preconditions.checkNotNull(tablePrefix, MRUtils.TABLE_PREFIX_PROPERTY + " not set"); - - String docTextTable = tablePrefix + "text"; - conf.set(ConfigUtils.FREE_TEXT_DOC_TABLENAME, docTextTable); - - String docTermTable = tablePrefix + "terms"; - conf.set(ConfigUtils.FREE_TEXT_TERM_TABLENAME, docTermTable); - - String geoTable = tablePrefix + "geo"; - conf.set(ConfigUtils.GEO_TABLENAME, geoTable); - - logger.info("Loading data into tables[rya, freetext, geo]"); - logger.info("Loading data into tables[" + docTermTable + " " + docTextTable + " " + geoTable + "]"); - - Job job = new Job(new Configuration(conf), "Batch Writer load data into Rya Core and Indexing Tables"); - job.setJarByClass(this.getClass()); - - // settings for a long-running job - Configuration jobConf = job.getConfiguration(); - jobConf.setBoolean("mapred.map.tasks.speculative.execution", false); - - jobConf.setInt("mapred.task.timeout", 1000 * 60 * 60 * 24); // timeout after 1 day - - job.setInputFormatClass(TextInputFormat.class); - - job.setMapperClass(ParseNtripsMapper.class); - - job.setNumReduceTasks(0); - - // Use Rya Output Format - job.setOutputFormatClass(RyaOutputFormat.class); - job.setOutputKeyClass(NullWritable.class); - job.setOutputValueClass(StatementWritable.class); - job.setMapOutputKeyClass(NullWritable.class); - job.setMapOutputValueClass(StatementWritable.class); - - TextInputFormat.setInputPaths(job, new Path(inputDir)); - - job.waitForCompletion(true); - - return 0; - } - - public static void main(String[] args) throws Exception { - ToolRunner.run(new Configuration(), new RyaBatchWriterInputTool(), args); - } - - public static class ParseNtripsMapper extends Mapper<LongWritable, Text, NullWritable, StatementWritable> { - private static final Logger logger = Logger.getLogger(ParseNtripsMapper.class); - - private RDFParser parser; - private RDFFormat rdfFormat; - - @Override - protected void setup(final Context context) throws IOException, InterruptedException { - super.setup(context); - Configuration conf = context.getConfiguration(); - - final ValueFactory vf = new ValueFactoryImpl(); - - String rdfFormatName = conf.get(MRUtils.FORMAT_PROP); - checkNotNull(rdfFormatName, "Rdf format cannot be null"); - rdfFormat = RDFFormat.valueOf(rdfFormatName); - - String namedGraphString = conf.get(MRUtils.NAMED_GRAPH_PROP); - checkNotNull(namedGraphString, MRUtils.NAMED_GRAPH_PROP + " cannot be null"); - - final Resource namedGraph =
vf.createURI(namedGraphString); - - parser = Rio.createParser(rdfFormat); - parser.setParserConfig(new ParserConfig(true, true, true, RDFParser.DatatypeHandling.VERIFY)); - parser.setRDFHandler(new RDFHandlerBase() { - @Override - public void handleStatement(Statement statement) throws RDFHandlerException { - Statement output; - if (rdfFormat.equals(RDFFormat.NTRIPLES)) { - output = new ContextStatementWrapper(statement, namedGraph); - } else { - output = statement; - } - try { - context.write(NullWritable.get(), new StatementWritable(output)); - } catch (IOException e) { - logger.error("Error writing statement", e); - throw new RDFHandlerException(e); - } catch (InterruptedException e) { - logger.error("Error writing statement", e); - throw new RDFHandlerException(e); - } - } - - }); - } - - @Override - public void map(LongWritable key, Text value, Context output) throws IOException, InterruptedException { - String rdf = value.toString(); - try { - parser.parse(new StringReader(rdf), ""); - } catch (RDFParseException e) { - logger.error("Line[" + rdf + "] cannot be parsed as format[" + rdfFormat + "]. Exception[" + e.getMessage() - + "]", e); - } catch (Exception e) { - logger.error("error during map", e); - throw new IOException("Exception occurred parsing triple[" + rdf + "]", e); - } - } - } - - @SuppressWarnings("serial") - private static class ContextStatementWrapper implements Statement { - private Statement statementWithoutContext; - private Resource context; - - public ContextStatementWrapper(Statement statementWithoutContext, Resource context) { - this.statementWithoutContext = statementWithoutContext; - this.context = context; - } - - @Override - public Resource getSubject() { - return statementWithoutContext.getSubject(); - } - - @Override - public URI getPredicate() { - return statementWithoutContext.getPredicate(); - } - - @Override - public Value getObject() { - return statementWithoutContext.getObject(); - } - - @Override - public Resource getContext() { - return context; - } - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java deleted file mode 100644 index 86cb73e8a..000000000 --- a/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java +++ /dev/null @@ -1,326 +0,0 @@ -package mvm.rya.accumulo.precompQuery; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Map.Entry; -import java.util.Set; - -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.openrdf.query.Binding; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -import mvm.rya.indexing.PrecompQueryIndexer; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet.AccValueFactory; - -public class AccumuloPrecompQueryIndexer implements PrecompQueryIndexer { - - - private Connector accCon; - private String tableName; - private Map<String, AccValueFactory> bindings; - - - - public AccumuloPrecompQueryIndexer(Connector accCon, String tableName) { - this.accCon = accCon; - this.tableName = tableName; - } - - - @Override - public void storeBindingSet(BindingSet bs) throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void storeBindingSets(Collection<BindingSet> bindingSets) throws IOException, IllegalArgumentException { - // TODO Auto-generated method stub - - } - - @Override - public CloseableIteration<BindingSet, QueryEvaluationException> queryPrecompJoin(List<String> varOrder, - String localityGroup, Map<String, AccValueFactory> bindings, Map<String, org.openrdf.model.Value> valMap, Collection<BindingSet> bsConstraints) - throws QueryEvaluationException, TableNotFoundException { - - - final int prefixLength = Integer.parseInt(varOrder.remove(varOrder.size() - 1)); - final Iterator<Entry<Key, Value>> accIter; - final HashMultimap<Range, BindingSet> map = HashMultimap.create(); - final List<BindingSet> extProdList = Lists.newArrayList(); - final Map<String, AccValueFactory> bindingMap = bindings; - final List<String> order = varOrder; - final BatchScanner bs = accCon.createBatchScanner(tableName, new Authorizations(), 10); - final Set<Range> ranges = Sets.newHashSet(); - - - - bs.fetchColumnFamily(new Text(localityGroup)); - - //process bindingSet and constant constraints - for (BindingSet bSet : bsConstraints) { - StringBuffer rangePrefix = new StringBuffer(); - int i = 0; - - for (String b : order) { - - if (i >= prefixLength) { - break; - } - - if (b.startsWith("-const-")) { - String val = bindings.get(b).create(valMap.get(b)); - rangePrefix.append(val); - rangePrefix.append("\u0000"); - } else { - - Binding v = bSet.getBinding(b); - if (v == null) { - throw new IllegalStateException("Binding set can't have null value!"); - } - String val = bindings.get(b).create(bSet.getValue(b)); - rangePrefix.append(val); - rangePrefix.append("\u0000"); - - } - - i++; - - } - if (rangePrefix.length() > 0) { - String prefixWithOutNull = rangePrefix.deleteCharAt(rangePrefix.length() - 1).toString(); - String prefixWithNull = prefixWithOutNull + "\u0001"; - Range r = new Range(new Key(prefixWithOutNull), true, new Key(prefixWithNull), false); - map.put(r, bSet); - ranges.add(r); - } else if (bSet.size() > 0) { - extProdList.add(bSet); - } - } - - //constant constraints and no bindingSet constraints - //add range of entire table if no constant constraints and - //bsConstraints consists
of single, empty set (occurs when AIS is - //first node evaluated in query) - if (ranges.isEmpty() && bsConstraints.size() > 0) { - - if (prefixLength > 0) { - StringBuffer rangePrefix = new StringBuffer(); - - int i = 0; - for (String b : order) { - if (i >= prefixLength) { - break; - } - if (b.startsWith("-const-")) { - String val = bindings.get(b).create(valMap.get(b)); - rangePrefix.append(val); - rangePrefix.append("\u0000"); - } - i++; - } - - String prefixWithOutNull = rangePrefix.deleteCharAt(rangePrefix.length() - 1).toString(); - String prefixWithNull = prefixWithOutNull + "\u0001"; - Range r = new Range(new Key(prefixWithOutNull), true, new Key(prefixWithNull), false); - ranges.add(r); - - } else { // no constant or bindingSet constraints - ranges.add(new Range("", true, "~", false)); - } - } - - if (ranges.size() == 0) { - accIter = null; - } else { - bs.setRanges(ranges); - accIter = bs.iterator(); - } - - - return new CloseableIteration<BindingSet, QueryEvaluationException>() { - - @Override - public void remove() throws QueryEvaluationException { - throw new UnsupportedOperationException(); - } - - private Iterator<BindingSet> inputSet = null; - private QueryBindingSet currentSolutionBs = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - - - @Override - public BindingSet next() throws QueryEvaluationException { - QueryBindingSet bs = new QueryBindingSet(); - - if (hasNextCalled) { - hasNextCalled = false; - if (inputSet != null) { - bs.addAll(inputSet.next()); - } - bs.addAll(currentSolutionBs); - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - if (inputSet != null) { - bs.addAll(inputSet.next()); - } - bs.addAll(currentSolutionBs); - } else { - throw new NoSuchElementException(); - } - } - - return bs; - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - - if (accIter == null) { - isEmpty = true; - return false; - } - - if (!hasNextCalled && !isEmpty) { - while (accIter.hasNext() || (inputSet != null && inputSet.hasNext())) { - - if (inputSet != null && inputSet.hasNext()) { - hasNextCalled = true; - return true; - } - - - Key k = accIter.next().getKey(); - final String[] s = k.getRow().toString().split("\u0000"); - - StringBuilder rangePrefix = new StringBuilder(); - // TODO Assuming that order specifies order of variables - // common to - // bindingSet passed in and variables in index table - // --size is equal to - - for (int i = 0; i < prefixLength; i++) { - rangePrefix.append(s[i]); - rangePrefix.append("\u0000"); - } - - // TODO I need to remember what the type was!
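- // Row keys in the index table are the binding values in varOrder joined by '\u0000'; - // bounding a rebuilt prefix with [prefix, prefix + '\u0001') therefore selects exactly - // the rows whose leading values match that prefix, since '\u0000' sorts below '\u0001'.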
- currentSolutionBs = new QueryBindingSet(); - int i = 0; - for (String b : order) { - if (b.startsWith("-const")) { - i++; - } else { - final String v = s[i]; - currentSolutionBs.addBinding(b, bindingMap.get(b).create(v)); - i++; - } - - } - //check to see if bindingSet constraints exist - if (map.size() > 0) { - String prefixWithOutNull = rangePrefix.deleteCharAt(rangePrefix.length() - 1).toString(); - String prefixWithNull = prefixWithOutNull + "\u0001"; - Range r = new Range(new Key(prefixWithOutNull), true, new Key(prefixWithNull), false); - inputSet = map.get(r).iterator(); - if (!inputSet.hasNext()) { - continue; - } else { - hasNextCalled = true; - return true; - } // check to see if binding set constraints exist, but no common vars - } else if (extProdList.size() > 0) { - inputSet = extProdList.iterator(); - hasNextCalled = true; - return true; - } else { // no bindingSet constraints--only constant constraints or none - hasNextCalled = true; - return true; - } - } - - isEmpty = true; - return false; - - } else if (isEmpty) { - return false; - } else { - return true; - } - - } - - @Override - public void close() throws QueryEvaluationException { - bs.close(); - } - - }; - } - - - - @Override - public void flush() throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void close() throws IOException { - // TODO Auto-generated method stub - - } - - - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java deleted file mode 100644 index 21d5de7d0..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java +++ /dev/null @@ -1,47 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.Closeable; -import java.io.IOException; -import java.util.Collection; - -import mvm.rya.indexing.accumulo.entity.StarQuery; - -import org.apache.accumulo.core.client.TableNotFoundException; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; - -public interface DocIdIndexer extends Closeable { - - - - public abstract CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(StarQuery query, - Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException; - - - - @Override - public abstract void close() throws IOException; - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java deleted file mode 100644 index 5d2678ba5..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java +++ /dev/null @@ -1,358 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.indexing.IndexingFunctionRegistry.FUNCTION_TYPE; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer; -import mvm.rya.indexing.accumulo.freetext.FreeTextTupleSet; -import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer; -import mvm.rya.indexing.accumulo.geo.GeoTupleSet; -import mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer; -import mvm.rya.indexing.accumulo.temporal.TemporalTupleSet; -import mvm.rya.indexing.mongodb.MongoGeoIndexer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.geotools.feature.SchemaException; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.And; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.FunctionCall; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.LeftJoin; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.ValueConstant; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.Lists; - -public class FilterFunctionOptimizer implements QueryOptimizer, Configurable { - - private ValueFactory valueFactory = new ValueFactoryImpl(); - - private Configuration conf; - private GeoIndexer geoIndexer; - private FreeTextIndexer freeTextIndexer; - private TemporalIndexer temporalIndexer; - private boolean init = false; - - - public FilterFunctionOptimizer() { - } - - - public FilterFunctionOptimizer(AccumuloRdfConfiguration conf) throws AccumuloException, AccumuloSecurityException, - TableNotFoundException, IOException, SchemaException, TableExistsException { - this.conf = conf; - init(); - } - - //setConf initializes FilterFunctionOptimizer so reflection can be used - //to create optimizer in RdfCloudTripleStoreConnection - @Override - public void setConf(Configuration conf) { - this.conf = conf; - init(); - } - - - private void init() { - if (!init) { - if (ConfigUtils.getUseMongo(conf)) { - this.geoIndexer = new MongoGeoIndexer(); - geoIndexer.setConf(conf); - } else { - this.geoIndexer = new GeoMesaGeoIndexer(); - geoIndexer.setConf(conf); - this.freeTextIndexer = new AccumuloFreeTextIndexer(); - freeTextIndexer.setConf(conf); - this.temporalIndexer = new AccumuloTemporalIndexer(); - temporalIndexer.setConf(conf); - } - init = true; - } - } - - @Override - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - // find variables used in property and resource based searches: - SearchVarVisitor
searchVars = new SearchVarVisitor(); - tupleExpr.visit(searchVars); - // rewrites for property searches: - processPropertySearches(tupleExpr, searchVars.searchProperties); - - } - - - - private void processPropertySearches(TupleExpr tupleExpr, Collection<Var> searchProperties) { - MatchStatementVisitor matchStatements = new MatchStatementVisitor(searchProperties); - tupleExpr.visit(matchStatements); - for (StatementPattern matchStatement : matchStatements.matchStatements) { - Var subject = matchStatement.getSubjectVar(); - if (subject.hasValue() && !(subject.getValue() instanceof Resource)) - throw new IllegalArgumentException("Query error: Found " + subject.getValue() + ", expected a URI or BNode"); - Validate.isTrue(subject.hasValue() || subject.getName() != null); - Validate.isTrue(!matchStatement.getObjectVar().hasValue() && matchStatement.getObjectVar().getName() != null); - buildQuery(tupleExpr, matchStatement); - } - - } - - - private void buildQuery(TupleExpr tupleExpr, StatementPattern matchStatement) { - //If our IndexerExpr (to be) is the rhs-child of LeftJoin, we can safely make that a Join: - // the IndexerExpr will (currently) not return results that can deliver unbound variables. - //This optimization should probably be generalized into a LeftJoin -> Join optimizer under certain conditions. Until that - // has been done, this code path at least takes care of queries generated by OpenSahara SparqTool that filter on OPTIONAL - // projections. E.g. summary~'full text search' (summary is optional). See #379 - if (matchStatement.getParentNode() instanceof LeftJoin) { - LeftJoin leftJoin = (LeftJoin) matchStatement.getParentNode(); - if (leftJoin.getRightArg() == matchStatement && leftJoin.getCondition() == null) - matchStatement.getParentNode().replaceWith(new Join(leftJoin.getLeftArg(), leftJoin.getRightArg())); - } - FilterFunction fVisitor = new FilterFunction(matchStatement.getObjectVar().getName()); - tupleExpr.visit(fVisitor); - List<IndexingExpr> results = Lists.newArrayList(); - for (int i = 0; i < fVisitor.func.size(); i++) { - results.add(new IndexingExpr(fVisitor.func.get(i), matchStatement, fVisitor.args.get(i))); - } - removeMatchedPattern(tupleExpr, matchStatement, new IndexerExprReplacer(results)); - - } - - //find vars contained in filters - private static class SearchVarVisitor extends QueryModelVisitorBase<RuntimeException> { - - private final Collection<Var> searchProperties = new ArrayList<Var>(); - - @Override - public void meet(FunctionCall fn) { - URI fun = new URIImpl(fn.getURI()); - Var result = IndexingFunctionRegistry.getResultVarFromFunctionCall(fun, fn.getArgs()); - if (result != null && !searchProperties.contains(result)) - searchProperties.add(result); - } - } - - //find StatementPatterns containing filter variables - private static class MatchStatementVisitor extends QueryModelVisitorBase<RuntimeException> { - private final Collection<Var> propertyVars; - private final Collection<Var> usedVars = new ArrayList<Var>(); - private final List<StatementPattern> matchStatements = new ArrayList<StatementPattern>(); - - public MatchStatementVisitor(Collection<Var> propertyVars) { - this.propertyVars = propertyVars; - } - - @Override public void meet(StatementPattern statement) { - Var object = statement.getObjectVar(); - if (propertyVars.contains(object)) - if (usedVars.contains(object)) - throw new IllegalArgumentException("Illegal search, variable is used multiple times as object: " + object.getName()); - else { - usedVars.add(object); - matchStatements.add(statement); - } - } - } - - private abstract class AbstractEnhanceVisitor extends QueryModelVisitorBase<RuntimeException> { - final String
matchVar; - List<URI> func = Lists.newArrayList(); - List<Value[]> args = Lists.newArrayList(); - - public AbstractEnhanceVisitor(String matchVar) { - this.matchVar = matchVar; - } - - protected void addFilter(URI uri, Value[] values) { - func.add(uri); - args.add(values); - } - } - - //create indexing expression for each filter matching var in filter StatementPattern - //replace old filter condition with true condition - private class FilterFunction extends AbstractEnhanceVisitor { - public FilterFunction(String matchVar) { - super(matchVar); - } - - @Override - public void meet(FunctionCall call) { - URI fnUri = valueFactory.createURI(call.getURI()); - Var resultVar = IndexingFunctionRegistry.getResultVarFromFunctionCall(fnUri, call.getArgs()); - if (resultVar != null && resultVar.getName().equals(matchVar)) { - addFilter(valueFactory.createURI(call.getURI()), extractArguments(matchVar, call)); - if (call.getParentNode() instanceof Filter || call.getParentNode() instanceof And || call.getParentNode() instanceof LeftJoin) - call.replaceWith(new ValueConstant(valueFactory.createLiteral(true))); - else - throw new IllegalArgumentException("Query error: Found " + call + " as part of an expression that is too complex"); - } - } - - - private Value[] extractArguments(String matchName, FunctionCall call) { - Value[] args = new Value[call.getArgs().size() - 1]; - int argI = 0; - for (int i = 0; i != call.getArgs().size(); ++i) { - ValueExpr arg = call.getArgs().get(i); - if (argI == i && arg instanceof Var && matchName.equals(((Var) arg).getName())) - continue; - if (arg instanceof ValueConstant) - args[argI] = ((ValueConstant) arg).getValue(); - else if (arg instanceof Var && ((Var) arg).hasValue()) - args[argI] = ((Var) arg).getValue(); - else - throw new IllegalArgumentException("Query error: Found " + arg + ", expected a Literal, BNode or URI"); - ++argI; - } - return args; - } - - @Override - public void meet(Filter filter) { - //First visit children, then condition (reverse of default): - filter.getArg().visit(this); - filter.getCondition().visit(this); - } - } - - private void removeMatchedPattern(TupleExpr tupleExpr, StatementPattern pattern, TupleExprReplacer replacer) { - List<TupleExpr> indexTuples = replacer.createReplacement(pattern); - if (indexTuples.size() > 1) { - VarExchangeVisitor vev = new VarExchangeVisitor(pattern); - tupleExpr.visit(vev); - Join join = new Join(indexTuples.remove(0), indexTuples.remove(0)); - for (TupleExpr geo : indexTuples) { - join = new Join(join, geo); - } - pattern.replaceWith(join); - } else if (indexTuples.size() == 1) { - pattern.replaceWith(indexTuples.get(0)); - pattern.setParentNode(null); - } else { - throw new IllegalStateException("Must have at least one replacement for matched StatementPattern."); - } - } - - private interface TupleExprReplacer { - List<TupleExpr> createReplacement(TupleExpr org); - } - - - //replace each filter pertinent StatementPattern with corresponding index expr - private class IndexerExprReplacer implements TupleExprReplacer { - private final List<IndexingExpr> indxExpr; - private FUNCTION_TYPE type; - - public IndexerExprReplacer(List<IndexingExpr> indxExpr) { - this.indxExpr = indxExpr; - URI func = indxExpr.get(0).getFunction(); - this.type = IndexingFunctionRegistry.getFunctionType(func); - } - - @Override - public List<TupleExpr> createReplacement(TupleExpr org) { - List<TupleExpr> indexTuples = Lists.newArrayList(); - switch (type) { - case GEO: - for (IndexingExpr indx : indxExpr) { - indexTuples.add(new GeoTupleSet(indx, geoIndexer)); - } - break; - case FREETEXT: - for (IndexingExpr indx :
indxExpr) { - indexTuples.add(new FreeTextTupleSet(indx, freeTextIndexer)); - } - break; - case TEMPORAL: - for (IndexingExpr indx : indxExpr) { - indexTuples.add(new TemporalTupleSet(indx, temporalIndexer)); - } - break; - default: - throw new IllegalArgumentException("Unsupported function type: " + type); - - } - return indexTuples; - } - } - - - private static class VarExchangeVisitor extends QueryModelVisitorBase<RuntimeException> { - - private final StatementPattern exchangeVar; - - public VarExchangeVisitor(StatementPattern sp) { - this.exchangeVar = sp; - } - - @Override - public void meet(Join node) { - QueryModelNode lNode = node.getLeftArg(); - if (lNode instanceof StatementPattern) { - exchangeVar.replaceWith(lNode); - node.setLeftArg(exchangeVar); - } else { - super.meet(node); - } - } - } - - - - - - - @Override - public Configuration getConf() { - return conf; - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java deleted file mode 100644 index 2d8bae9cc..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java +++ /dev/null @@ -1,62 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -/** - * A repository to store, index, and retrieve {@link Statement}s based on freetext features. - */ -public interface FreeTextIndexer extends RyaSecondaryIndexer { - - /** - * Query the Free Text Index with specific constraints. Null or empty parameters imply no constraint. - * - * @param query - * the query to perform - * @param contraints - * the constraints on the statements returned - * @return the set of statements that meet the query and other constraints. - * @throws IOException - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryText(String query, StatementContraints contraints) throws IOException; - - /** - * @return the set of predicates indexed by the indexer.
- */ - public abstract Set<URI> getIndexablePredicates(); - - @Override - public abstract void flush() throws IOException; - - @Override - public abstract void close() throws IOException; -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java deleted file mode 100644 index 7c04903c5..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java +++ /dev/null @@ -1,201 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import com.vividsolutions.jts.geom.Geometry; - -/** - * A repository to store, index, and retrieve {@link Statement}s based on geospatial features. - */ -public interface GeoIndexer extends RyaSecondaryIndexer { - /** - * Returns statements that contain a geometry that is equal to the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"Two geometries are topologically equal if their interiors intersect and no part of the interior or boundary of one geometry intersects the exterior of the other" - * <li>"A is equal to B if A is within B and A contains B" - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryEquals(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that is disjoint to the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"A and B are disjoint if they have no point in common. They form a set of disconnected geometries." - * <li>"A and B are disjoint if A does not intersect B" - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that intersects the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"a intersects b: geometries a and b have at least one point in common." - * <li>"not Disjoint" - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryIntersects(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that touches the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"a touches b, they have at least one boundary point in common, but no interior points." - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryTouches(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that crosses the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"a crosses b, they have some but not all interior points in common (and the dimension of the intersection is less than that of at least one of them)." - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryCrosses(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that is within the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>"a is within b, a lies in the interior of b" - * <li>Same as: "Contains(b,a)" - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryWithin(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that contains the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>b is within a. Geometry b lies in the interior of a. Another definition: "a 'contains' b iff no points of b lie in the exterior of a, and at least one point of the interior of b lies in the interior of a" - * <li>Same: Within(b,a) - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryContains(Geometry query, StatementContraints contraints); - - /** - * Returns statements that contain a geometry that overlaps the queried {@link Geometry} and meet the {@link StatementContraints}. - * - * <p> - * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM): - * <ul> - * <li>a crosses b, they have some but not all interior points in common (and the dimension of the intersection is less than that of at least one of them). - * </ul> - * - * @param query - * the queried geometry - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(Geometry query, StatementContraints contraints); - - /** - * @return the set of predicates indexed by the indexer. - */ - public abstract Set<URI> getIndexablePredicates(); - - @Override - public abstract void flush() throws IOException; - - @Override - public abstract void close() throws IOException; -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java deleted file mode 100644 index ee3d44474..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java +++ /dev/null @@ -1,34 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; - -import org.openrdf.query.algebra.TupleExpr; - -public interface ExternalIndexMatcher { - - - public Iterator<TupleExpr> getIndexedTuples(); - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java deleted file mode 100644 index 27a0d1563..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java +++ /dev/null @@ -1,730 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - - - -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import mvm.rya.indexing.external.QueryVariableNormalizer.VarCollector; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -/** - * Processes a {@link TupleExpr} and replaces sets of elements in the tree with {@link ExternalTupleSet} objects. - */ -public class GeneralizedExternalProcessor { - - - /** - * Iterates through list of normalized indexes and replaces all subtrees of query which match index with index. - * - * @param query - * @return TupleExpr - */ - public static TupleExpr process(TupleExpr query, List<ExternalTupleSet> indexSet) { - - boolean indexPlaced = false; - TupleExpr rtn = query.clone(); - - - //TODO optimization: turn on when testing done - QueryNodeCount qnc = new QueryNodeCount(); - rtn.visit(qnc); - - if (qnc.getNodeCount() / 2 < indexSet.size()) { - return null; - } - - - //move BindingSetAssignment Nodes out of the way - organizeBSAs(rtn); - - - // test to see if query contains no other nodes - // than filter, join, projection, and statement pattern and - // test whether query contains duplicate StatementPatterns and filters - if (isTupleValid(rtn)) { - - for (ExternalTupleSet index : indexSet) { - - // test to see if index contains at least one StatementPattern, - // that StatementPatterns are unique, - // and that all variables found in filters occur in some - // StatementPattern - if (isTupleValid(index.getTupleExpr())) { - - ExternalTupleSet eTup = (ExternalTupleSet) index.clone(); - SPBubbleDownVisitor indexVisitor = new SPBubbleDownVisitor(eTup); - rtn.visit(indexVisitor); - FilterBubbleManager fbmv = new FilterBubbleManager(eTup); - rtn.visit(fbmv); - SubsetEqualsVisitor subIndexVis = new SubsetEqualsVisitor(eTup, rtn); - rtn.visit(subIndexVis); - indexPlaced = subIndexVis.indexPlaced(); - if (!indexPlaced) { - break; - } - - } - - } - if (indexPlaced) { - return rtn; - } else { - return null; - } - - } else { - throw new IllegalArgumentException("Invalid Query."); - } - } - - - - - - // determines whether query is valid, which requires that a - // query must contain a StatementPattern, not contain duplicate - // Statement Patterns or Filters, be comprised of only Projection, - // Join, StatementPattern, and Filter nodes, and that any variable - // appearing in a Filter must appear in a StatementPattern.
- private static boolean isTupleValid(QueryModelNode node) { - - ValidQueryVisitor vqv = new ValidQueryVisitor(); - node.visit(vqv); - - Set<String> spVars = getVarNames(getQNodes("sp", node)); - - if (vqv.isValid() && (spVars.size() > 0)) { - - FilterCollector fvis = new FilterCollector(); - node.visit(fvis); - List<QueryModelNode> fList = fvis.getFilters(); - return (fList.size() == Sets.newHashSet(fList).size() && getVarNames(fList).size() <= spVars.size()); - - } else { - return false; - } - } - - private static Set<QueryModelNode> getQNodes(QueryModelNode queryNode) { - Set<QueryModelNode> rtns = new HashSet<QueryModelNode>(); - - StatementPatternCollector spc = new StatementPatternCollector(); - queryNode.visit(spc); - rtns.addAll(spc.getStatementPatterns()); - - FilterCollector fvis = new FilterCollector(); - queryNode.visit(fvis); - rtns.addAll(fvis.getFilters()); - - ExternalTupleCollector eVis = new ExternalTupleCollector(); - queryNode.visit(eVis); - rtns.addAll(eVis.getExtTup()); - - return rtns; - } - - private static Set<QueryModelNode> getQNodes(String node, QueryModelNode queryNode) { - - if (node.equals("sp")) { - Set<QueryModelNode> eSet = new HashSet<QueryModelNode>(); - StatementPatternCollector spc = new StatementPatternCollector(); - queryNode.visit(spc); - List<StatementPattern> spList = spc.getStatementPatterns(); - eSet.addAll(spList); - // returns empty set if list contains duplicate StatementPatterns - if (spList.size() > eSet.size()) { - return Sets.newHashSet(); - } else { - return eSet; - } - } else if (node.equals("filter")) { - - FilterCollector fvis = new FilterCollector(); - queryNode.visit(fvis); - - return Sets.newHashSet(fvis.getFilters()); - } else { - - throw new IllegalArgumentException("Invalid node type."); - } - } - - // moves StatementPatterns in query that also occur in index to bottom of - // query tree. - private static class SPBubbleDownVisitor extends QueryModelVisitorBase<RuntimeException> { - - private TupleExpr tuple; - private QueryModelNode indexQNode; - private Set<QueryModelNode> sSet = Sets.newHashSet(); - - public SPBubbleDownVisitor(ExternalTupleSet index) { - - this.tuple = index.getTupleExpr(); - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes("sp", indexQNode); - - } - - public void meet(Projection node) { - // moves external tuples above statement patterns before attempting - // to bubble down index statement patterns found in query tree - - organizeExtTuples(node); - - super.meet(node); - } - - public void meet(Join node) { - // if right node contained in index, move it to bottom of query tree - if (sSet.contains(node.getRightArg())) { - - Set<QueryModelNode> eSet = getQNodes("sp", node); - Set<QueryModelNode> compSet = Sets.difference(eSet, sSet); - - if (eSet.containsAll(sSet)) { - - QNodeExchanger qne = new QNodeExchanger(node.getRightArg(), compSet); - node.visit(qne); - node.replaceChildNode(node.getRightArg(), qne.getReplaced()); - - super.meet(node); - } - return; - } - // if left node contained in index, move it to bottom of query tree - else if (sSet.contains(node.getLeftArg())) { - - Set<QueryModelNode> eSet = getQNodes("sp", node); - Set<QueryModelNode> compSet = Sets.difference(eSet, sSet); - - if (eSet.containsAll(sSet)) { - - QNodeExchanger qne = new QNodeExchanger(node.getLeftArg(), compSet); - node.visit(qne); - node.replaceChildNode(node.getLeftArg(), qne.getReplaced()); - - super.meet(node); - } - return; - - } else { - super.meet(node); - } - - } - - // moves all ExternalTupleSets in query tree above remaining - // StatementPatterns - private static void organizeExtTuples(QueryModelNode node) { - - ExternalTupleCollector eVis = new ExternalTupleCollector(); - node.visit(eVis); - - ExtTupleExchangeVisitor oev = new
ExtTupleExchangeVisitor(eVis.getExtTup()); - node.visit(oev); - } - - } - - // given a replacement QueryModelNode and compSet, this visitor replaces the - // first - // element in the query tree that occurs in compSet with replacement and - // returns - // the element that was replaced. - private static class QNodeExchanger extends QueryModelVisitorBase { - - private QueryModelNode toBeReplaced; - private QueryModelNode replacement; - private Set compSet; - - public QNodeExchanger(QueryModelNode replacement, Set compSet) { - this.replacement = replacement; - this.toBeReplaced = replacement; - this.compSet = compSet; - } - - public QueryModelNode getReplaced() { - return toBeReplaced; - } - - public void meet(Join node) { - - if (compSet.contains(node.getRightArg())) { - this.toBeReplaced = node.getRightArg(); - node.replaceChildNode(node.getRightArg(), replacement); - return; - } else if (compSet.contains(node.getLeftArg())) { - this.toBeReplaced = node.getLeftArg(); - node.replaceChildNode(node.getLeftArg(), replacement); - return; - } else { - super.meet(node); - } - - } - - } - - // moves filter that occurs in both query and index down the query tree so - // that that it is positioned - // above statement patterns associated with index. Precondition for calling - // this method is that - // SPBubbleDownVisitor has been called to position index StatementPatterns - // within query tree. - //TODO this visitor assumes that all filters are positioned at top of query tree - //could lead to problems if filter optimizer called before external processor - private static class FilterBubbleDownVisitor extends QueryModelVisitorBase { - - private QueryModelNode filter; - private Set compSet; - private boolean filterPlaced = false; - - public FilterBubbleDownVisitor(QueryModelNode filter, Set compSet) { - this.filter = filter; - this.compSet = compSet; - - } - - public boolean filterPlaced() { - return filterPlaced; - } - - public void meet(Join node) { - - if (!compSet.contains(node.getRightArg())) { - // looks for placed to position filter node. if right node is - // contained in index - // and left node is statement pattern node contained in index or - // is a join, place - // filter above join. 
- if (node.getLeftArg() instanceof Join || !(compSet.contains(node.getLeftArg()))) { - - QueryModelNode pNode = node.getParentNode(); - ((Filter) filter).setArg(node); - pNode.replaceChildNode(node, filter); - filterPlaced = true; - - return; - } // otherwise place filter below join and above right arg - else { - ((Filter) filter).setArg(node.getRightArg()); - node.replaceChildNode(node.getRightArg(), filter); - filterPlaced = true; - return; - - } - } else if ((node.getLeftArg() instanceof StatementPattern) && !compSet.contains(node.getLeftArg())) { - - ((Filter) filter).setArg(node.getLeftArg()); - node.replaceChildNode(node.getLeftArg(), filter); - filterPlaced = true; - - return; - } else { - super.meet(node); - } - } - - } - - private static Set getVarNames(Collection nodes) { - - List tempVars; - Set nodeVarNames = Sets.newHashSet(); - - for (QueryModelNode s : nodes) { - tempVars = VarCollector.process(s); - for (String t : tempVars) - nodeVarNames.add(t); - } - return nodeVarNames; - - } - - // visitor which determines whether or not to reposition a filter by calling - // FilterBubbleDownVisitor - private static class FilterBubbleManager extends QueryModelVisitorBase { - - private TupleExpr tuple; - private QueryModelNode indexQNode; - private Set sSet = Sets.newHashSet(); - private Set bubbledFilters = Sets.newHashSet(); - - public FilterBubbleManager(ExternalTupleSet index) { - this.tuple = index.getTupleExpr(); - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes(indexQNode); - - } - - public void meet(Filter node) { - - Set eSet = getQNodes(node); - Set compSet = Sets.difference(eSet, sSet); - - // if index contains filter node and it hasn't already been moved, - // move it down - // query tree just above position of statement pattern nodes found - // in both query tree - // and index (assuming that SPBubbleDownVisitor has already been - // called) - if (sSet.contains(node.getCondition()) && !bubbledFilters.contains(node.getCondition())) { - FilterBubbleDownVisitor fbdv = new FilterBubbleDownVisitor((Filter) node.clone(), compSet); - node.visit(fbdv); - bubbledFilters.add(node.getCondition()); - // checks if filter correctly placed, and if it has been, - // removes old copy of filter - if (fbdv.filterPlaced()) { - - QueryModelNode pNode = node.getParentNode(); - TupleExpr cNode = node.getArg(); - pNode.replaceChildNode(node, cNode); - - - super.meetNode(pNode); - } - super.meet(node); - - } else { - super.meet(node); - } - } - } - - // iterates through the query tree and attempts to match subtrees with - // index. When a match is - // found, the subtree is replaced by an ExternalTupleSet formed from the - // index. Pre-condition for - // calling this method is that both SPBubbleDownVisitor and - // FilterBubbleManager have been called - // to position the StatementPatterns and Filters. 
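// Editor's note (illustrative; not part of the original file): once the bubble-down
// passes have grouped the index's nodes into a contiguous subtree, matching reduces
// to a subtree-equality test. For an index whose pattern is Join(SP1, SP2) and a
// query tree
//
//        Join                       Join
//       /    \                     /    \
//     Join    SP3      ==>       ETS     SP3
//     /  \
//   SP1   SP2
//
// the visitor below replaces the matched Join(SP1, SP2) with the ExternalTupleSet
// (ETS) built from the index, since the two node sets are equal on that subtree.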
- private static class SubsetEqualsVisitor extends QueryModelVisitorBase { - - private TupleExpr query; - private TupleExpr tuple; - private QueryModelNode indexQNode; - private ExternalTupleSet set; - private Set sSet = Sets.newHashSet(); - private TupleExpr temp; - private boolean indexPlaced = false; - - - public SubsetEqualsVisitor(ExternalTupleSet index, TupleExpr query) { - this.query = query; - this.tuple = index.getTupleExpr(); - this.set = index; - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes(indexQNode); - - } - - public boolean indexPlaced() { - return indexPlaced; - } - - - public void meet(Join node) { - - Set eSet = getQNodes(node); - - if (eSet.containsAll(sSet) && !(node.getRightArg() instanceof BindingSetAssignment)) { - -// System.out.println("Eset is " + eSet + " and sSet is " + sSet); - - if (eSet.equals(sSet)) { - node.replaceWith(set); - indexPlaced = true; - return; - } else { - if (node.getLeftArg() instanceof StatementPattern && sSet.size() == 1) { - if(sSet.contains(node.getLeftArg())) { - node.setLeftArg(set); - indexPlaced = true; - } else if(sSet.contains(node.getRightArg())) { - node.setRightArg(set); - indexPlaced = true; - } else { - return; - } - } - else { - super.meet(node); - } - } - } else if (eSet.containsAll(sSet)) { - - super.meet(node); - - } else { - return; - } - - } - //TODO might need to include BindingSetAssignment Condition here - //to account for index consisting of only filter and BindingSetAssignment nodes - public void meet(Filter node) { - - Set eSet = getQNodes(node); - - if (eSet.containsAll(sSet)) { - - if (eSet.equals(sSet)) { - node.replaceWith(set); - indexPlaced = true; - return; - } else { - node.getArg().visit(this); - } - } - } - - - public void meet(StatementPattern node) { - return; - } - } - - // visitor which determines whether a query is valid (i.e. 
it does not - // contain nodes other than - // Projection, Join, Filter, StatementPattern ) - private static class ValidQueryVisitor extends QueryModelVisitorBase { - - private boolean isValid = true; - - public boolean isValid() { - return isValid; - } - - public void meet(Projection node) { - node.getArg().visit(this); - } - - public void meet(Filter node) { - node.getArg().visit(this); - } - - - - - - public void meetNode(QueryModelNode node) { - - if (!((node instanceof Join) || (node instanceof StatementPattern) || (node instanceof BindingSetAssignment) || (node instanceof Var))) { - isValid = false; - return; - - } else{ - super.meetNode(node); - } - } - - } - - // repositions ExternalTuples above StatementPatterns within query tree - private static class ExtTupleExchangeVisitor extends QueryModelVisitorBase { - - private Set extTuples; - - public ExtTupleExchangeVisitor(Set extTuples) { - this.extTuples = extTuples; - } - - public void meet(Join queryNode) { - - // if query tree contains external tuples and they are not - // positioned above statement pattern node - // reposition - if (this.extTuples.size() > 0 && !(queryNode.getRightArg() instanceof ExternalTupleSet) - && !(queryNode.getRightArg() instanceof BindingSetAssignment)) { - - if (queryNode.getLeftArg() instanceof ExternalTupleSet) { - QueryModelNode temp = queryNode.getLeftArg(); - queryNode.setLeftArg(queryNode.getRightArg()); - queryNode.setRightArg((TupleExpr)temp); - } else { - - QNodeExchanger qnev = new QNodeExchanger((QueryModelNode) queryNode.getRightArg(), this.extTuples); - queryNode.visit(qnev); - queryNode.replaceChildNode(queryNode.getRightArg(), qnev.getReplaced()); - super.meet(queryNode); - } - } else { - super.meet(queryNode); - } - - } - - } - - private static class ExternalTupleCollector extends QueryModelVisitorBase { - - private Set eSet = new HashSet(); - - @Override - public void meetNode(QueryModelNode node) throws RuntimeException { - if (node instanceof ExternalTupleSet) { - eSet.add(node); - } - super.meetNode(node); - } - - public Set getExtTup() { - return eSet; - } - - } - - private static class FilterCollector extends QueryModelVisitorBase { - - private List filterList = Lists.newArrayList(); - - public List getFilters() { - return filterList; - } - - @Override - public void meet(Filter node) { - filterList.add(node.getCondition()); - super.meet(node); - } - - } - - private static void organizeBSAs(QueryModelNode node) { - - BindingSetAssignmentCollector bsac = new BindingSetAssignmentCollector(); - node.visit(bsac); - - if (bsac.containsBSAs()) { - Set bsaSet = bsac.getBindingSetAssignments(); - BindingSetAssignmentExchangeVisitor bsaev = new BindingSetAssignmentExchangeVisitor(bsaSet); - node.visit(bsaev); - } - } - - // repositions ExternalTuples above StatementPatterns within query tree - private static class BindingSetAssignmentExchangeVisitor extends QueryModelVisitorBase { - - private Set bsas; - - public BindingSetAssignmentExchangeVisitor(Set bsas) { - this.bsas = bsas; - } - - public void meet(Join queryNode) { - - // if query tree contains external tuples and they are not - // positioned above statement pattern node - // reposition - if (this.bsas.size() > 0 && !(queryNode.getRightArg() instanceof BindingSetAssignment)) { - QNodeExchanger qnev = new QNodeExchanger((QueryModelNode) queryNode.getRightArg(), bsas); - queryNode.visit(qnev); - queryNode.replaceChildNode(queryNode.getRightArg(), qnev.getReplaced()); - super.meet(queryNode); - } else { - super.meet(queryNode); - } - - } 
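// Editor's note (illustrative; not part of the original file): the exchange above
// appears to rotate BindingSetAssignment (BSA) nodes toward the top of each join
// segment so they cannot split the StatementPatterns an index must match, e.g.
//
//     Join(Join(SP1, BSA), SP2)   ==>   Join(Join(SP1, SP2), BSA)
//
// leaving Join(SP1, SP2) as a contiguous subtree that an index can replace.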
- - } - - - public static class BindingSetAssignmentCollector extends QueryModelVisitorBase { - - private Set bindingSetList = Sets.newHashSet(); - - public Set getBindingSetAssignments() { - return bindingSetList; - } - - public boolean containsBSAs() { - return (bindingSetList.size() > 0); - } - - @Override - public void meet(BindingSetAssignment node) { - bindingSetList.add(node); - super.meet(node); - } - - } - - - - public static class QueryNodeCount extends QueryModelVisitorBase { - - private int nodeCount; - - public QueryNodeCount() { - nodeCount = 0; - } - - public int getNodeCount() { - return nodeCount; - } - - - @Override - public void meet(StatementPattern node) { - nodeCount += 1; - return; - } - - @Override - public void meet(Filter node) { - nodeCount += 1; - node.getArg().visit(this); - } - - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java deleted file mode 100644 index fa1dc13e6..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java +++ /dev/null @@ -1,35 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.List; - - - -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -public interface IndexListPruner { - - public Set getRelevantIndices(List indexList); - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java deleted file mode 100644 index 74df958a3..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java +++ /dev/null @@ -1,210 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Iterator; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.Sets; - - - - -public class IndexPlanValidator implements TupleValidator { - - private boolean omitCrossProd = false; - - - public IndexPlanValidator(boolean omitCrossProd) { - this.omitCrossProd = omitCrossProd; - } - - public void setOmitCrossProd(boolean omitCrossProd) { - this.omitCrossProd = omitCrossProd; - } - - - @Override - public boolean isValid(TupleExpr te) { - - TupleValidateVisitor tv = new TupleValidateVisitor(); - te.visit(tv); - - return tv.isValid(); - } - - - - - public int getValidTupleSize(Iterator iter) { - - int size = 0; - - while(iter.hasNext()) { - if(isValid(iter.next())) { - size++; - } - } - - return size; - - } - - - - @Override - public Iterator getValidTuples(Iterator tupleIter) { - - final Iterator iter = tupleIter; - - return new Iterator() { - - private TupleExpr next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - while (iter.hasNext()) { - TupleExpr temp = iter.next(); - if (isValid(temp)) { - next = temp; - hasNextCalled = true; - return true; - } - } - isEmpty = true; - return false; - } else if(isEmpty) { - return false; - }else { - return true; - } - } - - @Override - public TupleExpr next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if(isEmpty) { - throw new NoSuchElementException(); - }else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - } - } - - @Override - public void remove() { - - throw new UnsupportedOperationException("Cannot delete from iterator!"); - - } - - }; - } - - private boolean isJoinValid(Join join) { - - Set leftBindingNames = join.getLeftArg().getBindingNames(); - Set rightBindingNames = join.getRightArg().getBindingNames(); - - - //System.out.println("Left binding names are " + leftBindingNames + " and right binding names are " + rightBindingNames); - - if (Sets.intersection(leftBindingNames, rightBindingNames).size() == 0) { - if (omitCrossProd) { - return false; - } else { - return true; - } - - } else { - if (join.getRightArg() instanceof ExternalTupleSet) { - - return ((ExternalTupleSet) join.getRightArg()).supportsBindingSet(leftBindingNames); - - } else { - return true; - } - } - - } - - public class TupleValidateVisitor extends QueryModelVisitorBase { - - private boolean isValid = true; - - public boolean isValid() { - return isValid; - } - - @Override - public void meet(Projection node) { - node.getArg().visit(this); - } - - @Override - public void meet(StatementPattern node) { - return; - } - - public void meet(BindingSetAssignment node) { - return; - } - - @Override - public void meet(Filter node) { - node.getArg().visit(this); - } - - @Override - public void meet(Join node) { - if (isJoinValid(node)) { - super.meet(node); - } else { - isValid = false; - return; - } - } - - } - -} diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java deleted file mode 100644 index 3586a5e74..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java +++ /dev/null @@ -1,33 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; - -import org.openrdf.query.algebra.TupleExpr; - -public interface IndexTupleGenerator { - - - public Iterator getPlans(Iterator indexPlans); - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java deleted file mode 100644 index acf3f6aeb..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java +++ /dev/null @@ -1,207 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.indexing.external.QueryVariableNormalizer; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.TupleExpr; - -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -public class IndexedExecutionPlanGenerator implements ExternalIndexMatcher { - - private final TupleExpr query; - private List normalizedIndexList; - - public IndexedExecutionPlanGenerator(TupleExpr query, List indexList) { - this.query = query; - VarConstantIndexListPruner vci = new VarConstantIndexListPruner(query); - normalizedIndexList = getNormalizedIndices(vci.getRelevantIndices(indexList)); - } - - public List getNormalizedIndices() { - return normalizedIndexList; - } - - - - - @Override - public Iterator getIndexedTuples() { - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(query); - final Iterator> iter = vic.getValidIndexCombos(normalizedIndexList); - - return new Iterator() { - - private TupleExpr next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - while (iter.hasNext()) { - TupleExpr temp = GeneralizedExternalProcessor.process(query, iter.next()); - if (temp != null) { - next = temp; - hasNextCalled = true; - return true; - } - } - isEmpty = true; - return false; - } else if(isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public TupleExpr next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if(isEmpty) { - throw new NoSuchElementException(); - }else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - - } - - } - - @Override - public void remove() { - - throw new UnsupportedOperationException("Cannot delete from iterator!"); - - } - - }; - } - - - private List getNormalizedIndices(Set indexSet) { - - ExternalTupleSet tempIndex; - List normalizedIndexSet = Lists.newArrayList(); - - for (ExternalTupleSet e : indexSet) { - - List tupList = null; - try { - tupList = QueryVariableNormalizer.getNormalizedIndex(query, e.getTupleExpr()); - } catch (Exception e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - for (TupleExpr te : tupList) { - - tempIndex = (ExternalTupleSet) e.clone(); - setTableMap(te, tempIndex); - setSupportedVarOrderMap(tempIndex); - tempIndex.setProjectionExpr((Projection) te); - normalizedIndexSet.add(tempIndex); - - } - - } - - return normalizedIndexSet; - } - - private void setTableMap(TupleExpr tupleMatch, ExternalTupleSet index) { - - List replacementVars = Lists.newArrayList(tupleMatch.getBindingNames()); - List tableVars = Lists.newArrayList(index.getTupleExpr().getBindingNames()); - - Map tableMap = Maps.newHashMap(); - - for (int i = 0; i < tableVars.size(); i++) { - tableMap.put(replacementVars.get(i), tableVars.get(i)); - } - // System.out.println("Table map is " + tableMap); - index.setTableVarMap(tableMap); - - } - - - private void setSupportedVarOrderMap(ExternalTupleSet index) { - - Map> supportedVarOrders = Maps.newHashMap(); - BiMap biMap = HashBiMap.create(index.getTableVarMap()).inverse(); - 
Map> oldSupportedVarOrders = index.getSupportedVariableOrderMap(); - - Set temp = null; - Set keys = oldSupportedVarOrders.keySet(); - - for (String s : keys) { - temp = oldSupportedVarOrders.get(s); - Set newSet = Sets.newHashSet(); - - for (String t : temp) { - newSet.add(biMap.get(t)); - } - - String[] tempStrings = s.split("\u0000"); - String v = ""; - for(String u: tempStrings) { - if(v.length() == 0){ - v = v + biMap.get(u); - } else { - v = v + "\u0000" + biMap.get(u); - } - } - - supportedVarOrders.put(v, newSet); - - } - - index.setSupportedVariableOrderMap(supportedVarOrders); - - } - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java deleted file mode 100644 index dbd1972f2..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java +++ /dev/null @@ -1,32 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; - -import org.openrdf.query.algebra.TupleExpr; - -public interface IndexedQueryPlanSelector { - - public TupleExpr getThreshholdQueryPlan(Iterator tupleList, double threshhold, - double indexWeight, double commonVarWeight, double dirProdWeight); - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java deleted file mode 100644 index a333dcb13..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java +++ /dev/null @@ -1,240 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Iterator; -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.Sets; - -public class ThreshholdPlanSelector implements IndexedQueryPlanSelector { - - private TupleExpr query; - private int queryNodeCount = 0; - - public ThreshholdPlanSelector(TupleExpr query) { - this.query = query; - QueryNodeCount qnc = new QueryNodeCount(); - query.visit(qnc); - - this.queryNodeCount = qnc.getNodeCount(); - - if(queryNodeCount == 0) { - throw new IllegalArgumentException("TupleExpr must contain at least one node!"); - } - } - - - - - @Override - public TupleExpr getThreshholdQueryPlan(Iterator tuples, double threshhold, double indexWeight, - double commonVarWeight, double extProdWeight) { - - if (threshhold < 0 || threshhold > 1) { - throw new IllegalArgumentException("Threshhold must be between 0 and 1!"); - } - double minCost = Double.MAX_VALUE; - TupleExpr minTup = null; - - double tempCost = 0; - TupleExpr tempTup = null; - - - - while (tuples.hasNext()) { - - tempTup = tuples.next(); - tempCost = getCost(tempTup, indexWeight, commonVarWeight, extProdWeight); - - if (tempCost < minCost) { - minCost = tempCost; - minTup = tempTup; - } - - if (minCost <= threshhold) { - return minTup; - } - - } - - return minTup; - } - - public double getCost(TupleExpr te, double indexWeight, double commonVarWeight, double dirProdWeight) { - - if (indexWeight + commonVarWeight + dirProdWeight != 1) { - throw new IllegalArgumentException("Weights must sum to 1!"); - } - - if(te == null) { - throw new IllegalArgumentException("TupleExpr cannot be null!"); - } - - QueryNodeCount qnc = new QueryNodeCount(); - te.visit(qnc); - - double nodeCount = qnc.getNodeCount(); - double commonJoinVars = qnc.getCommonJoinVarCount(); - double joinVars = qnc.getJoinVarCount(); - double joinCount = qnc.getJoinCount(); - double dirProdCount = qnc.getDirProdCount(); - double dirProductScale; - - if(queryNodeCount > nodeCount) { - dirProductScale = 1/((double)(queryNodeCount - nodeCount)); - } else { - dirProductScale = 1/((double)(queryNodeCount - nodeCount + 1)); - } - - double joinVarRatio; - double dirProductRatio; - - if(joinVars != 0) { - joinVarRatio = (joinVars - commonJoinVars) / joinVars; - } else { - joinVarRatio = 0; - } - - if(joinCount != 0) { - dirProductRatio = dirProdCount / joinCount; - } else { - dirProductRatio = 0; - } - - - double cost = indexWeight * (nodeCount / queryNodeCount) + commonVarWeight*joinVarRatio - + dirProdWeight *dirProductRatio*dirProductScale; - -// System.out.println("Tuple is " + te + " and cost is " + cost); -// System.out.println("Node count is " + nodeCount + " and query node count is " + queryNodeCount); -// System.out.println("Common join vars are " + commonJoinVars + " and join vars " + joinVars); -// System.out.println("Join count is " + joinCount + " and direct prod count is " + dirProdCount); - - return cost; - } - - public static class QueryNodeCount extends QueryModelVisitorBase { - - private int nodeCount = 0; - private int commonJoinVars = 0; - private int joinVars = 0; - private int joinCount = 0; - 
private int dirProdCount = 0; - - public int getCommonJoinVarCount() { - return commonJoinVars; - } - - public int getJoinVarCount() { - return joinVars; - } - - public int getNodeCount() { - return nodeCount; - } - - public int getJoinCount() { - return joinCount; - } - - public int getDirProdCount() { - return dirProdCount; - } - - public void meet(Projection node) { - node.getArg().visit(this); - } - - public void meetNode(QueryModelNode node) { - if (node instanceof ExternalTupleSet) { - nodeCount += 1; - return; - } - super.meetNode(node); - return; - } - - @Override - public void meet(StatementPattern node) { - nodeCount += 1; - return; - } - - @Override - public void meet(Filter node) { - nodeCount += 1; - node.getArg().visit(this); - } - - public void meet(BindingSetAssignment node) { - nodeCount += 1; - return; - } - - @Override - public void meet(Join node) { - - int tempCount = 0; - - Set lNames = node.getLeftArg().getAssuredBindingNames(); - Set rNames = node.getRightArg().getAssuredBindingNames(); - - for(String s: node.getLeftArg().getBindingNames()) { - if(s.startsWith("-const-")) { - lNames.remove(s); - } - } - - for(String s: node.getRightArg().getBindingNames()) { - if(s.startsWith("-const-")) { - rNames.remove(s); - } - } - - - joinVars += Math.min(lNames.size(), rNames.size()); - tempCount = Sets.intersection(lNames, rNames).size(); - if (tempCount == 0) { - dirProdCount += 1; - } else { - commonJoinVars += tempCount; - } - joinCount += 1; - - super.meet(node); - - } - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java deleted file mode 100644 index 2776a9e37..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java +++ /dev/null @@ -1,215 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.beust.jcommander.internal.Lists; -import com.google.common.collect.Collections2; -import com.google.common.collect.Sets; - -public class TupleExecutionPlanGenerator implements IndexTupleGenerator { - - - - @Override - public Iterator getPlans(Iterator indexPlans) { - - final Iterator iter = indexPlans; - - return new Iterator() { - - private TupleExpr next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - Iterator tuples = null; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - if (tuples != null && tuples.hasNext()) { - next = tuples.next(); - hasNextCalled = true; - return true; - } else { - while (iter.hasNext()) { - tuples = getPlans(iter.next()).iterator(); - if (tuples == null) { - throw new IllegalStateException("Plans cannot be null!"); - } - next = tuples.next(); - hasNextCalled = true; - return true; - } - isEmpty = true; - return false; - } - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public TupleExpr next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if(isEmpty) { - throw new NoSuchElementException(); - }else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - - } - - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Cannot delete from iterator!"); - } - - }; - - } - - private List getPlans(TupleExpr te) { - - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - - Set nodeSet = nc.getNodeSet(); - List filterList = nc.getFilterSet(); - Projection projection = nc.getProjection().clone(); - - List queryPlans = Lists.newArrayList(); - - Collection> plans = Collections2.permutations(nodeSet); - - for (List p : plans) { - if (p.size() == 0) { - throw new IllegalArgumentException("Tuple must contain at least one node!"); - } else if (p.size() == 1) { - queryPlans.add(te); - } else { - queryPlans.add(buildTuple(p, filterList, projection)); - } - } - - return queryPlans; - } - - private TupleExpr buildTuple(List nodes, List filters, Projection projection) { - - Projection proj = (Projection)projection.clone(); - Join join = null; - - join = new Join((TupleExpr) nodes.get(0).clone(), (TupleExpr) nodes.get(1).clone()); - - for (int i = 2; i < nodes.size(); i++) { - join = new Join(join, (TupleExpr) nodes.get(i).clone()); - } - - if (filters.size() == 0) { - proj.setArg(join); - return proj; - } else { - TupleExpr queryPlan = join; - for (Filter f : filters) { - Filter filt = (Filter) f.clone(); - filt.setArg(queryPlan); - queryPlan = filt; - } - proj.setArg(queryPlan); - return proj; - } - - } - - public static class NodeCollector extends QueryModelVisitorBase { - - private Set nodeSet = Sets.newHashSet(); - private List filterSet = Lists.newArrayList(); - private Projection projection; - - public Projection 
getProjection() { - return projection; - } - - public Set getNodeSet() { - return nodeSet; - } - - public List getFilterSet() { - return filterSet; - } - - @Override - public void meet(Projection node) { - projection = node; - node.getArg().visit(this); - } - - @Override - public void meetNode(QueryModelNode node) throws RuntimeException { - if (node instanceof ExternalTupleSet || node instanceof BindingSetAssignment - || node instanceof StatementPattern) { - nodeSet.add(node); - } - super.meetNode(node); - } - - @Override - public void meet(Filter node) { - filterSet.add(node); - node.getArg().visit(this); - } - - } - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java deleted file mode 100644 index 089ef5de6..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java +++ /dev/null @@ -1,348 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.beust.jcommander.internal.Lists; -import com.google.common.collect.Collections2; -import com.google.common.collect.Maps; - - -//A given TupleExpr can be broken up into "join segments", which are sections of the TupleExpr where nodes can -//be freely exchanged. This class creates a list of permuted TupleExpr from a specified TupleExpr by permuting the nodes -//in each join segment. 
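// Editor's note (illustrative; not part of the original file): for a join segment
// whose non-join nodes are {SP1, SP2, SP3}, the re-arranger emits one plan per
// ordering of the segment, 3! = 6 in total, e.g.
//
//     Join(Join(SP1, SP2), SP3),  Join(Join(SP2, SP1), SP3),  Join(Join(SP1, SP3), SP2), ...
//
// with any filters at the top of the segment re-chained above the permuted joins.
// A minimal driver, assuming a parsed TupleExpr te:
//
//     List<TupleExpr> reOrderings = TupleReArranger.getTupleReOrderings(te);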
-public class TupleReArranger { - - private static Map>> joinArgs; - private static Map> filterArgs; - - - public static Iterator getPlans(Iterator indexPlans) { - - final Iterator iter = indexPlans; - - return new Iterator() { - - private TupleExpr next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - Iterator tuples = null; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - if (tuples != null && tuples.hasNext()) { - next = tuples.next(); - hasNextCalled = true; - return true; - } else { - while (iter.hasNext()) { - tuples = getTupleReOrderings(iter.next()).iterator(); - if (tuples == null) { - throw new IllegalStateException("Plans cannot be null!"); - } - next = tuples.next(); - hasNextCalled = true; - return true; - } - isEmpty = true; - return false; - } - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public TupleExpr next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Cannot delete from iterator!"); - } - }; - } - - - //Give a TupleExpr, return list of join segment permuted TupleExpr - public static List getTupleReOrderings(TupleExpr te) { - - joinArgs = Maps.newHashMap(); - filterArgs = Maps.newHashMap(); - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - joinArgs = nc.getPerms(); - List joins = Lists.newArrayList(joinArgs.keySet()); - - return getPlans(getReOrderings(joins), te); - - } - - - //iterates through the reOrder maps, and for each reOrder map builds a new, reordered tupleExpr - private static List getPlans(List>> reOrderings, TupleExpr te) { - - List queryPlans = Lists.newArrayList(); - PermInserter pm = new PermInserter(); - - for (Map> order : reOrderings) { - TupleExpr clone = te.clone(); - pm.setReOrderMap(order); - clone.visit(pm); - queryPlans.add(clone); - } - - return queryPlans; - } - - - - //recursive method which produces a list of maps. Each map associates a join with - //a list of the non-join arguments below it contained in same join segment. The list - //represents an ordering of the - //non-join arguments and creating a TupleExpr from this map yields a new TupleExpr - //whose non-join arguments are permuted - private static List>> getReOrderings(List joins) { - Map> reOrder = Maps.newHashMap(); - List>> reOrderings = Lists.newArrayList(); - getReOrderings(joins, reOrder, reOrderings); - return reOrderings; - - } - - private static void getReOrderings(List joins, Map> reOrder, - List>> reOrderings) { - - if (joins.isEmpty()) { - reOrderings.add(reOrder); - return; - } - - List joinsCopy = Lists.newArrayList(joins); - Join join = joinsCopy.remove(0); - List> joinArgPerms = joinArgs.get(join); - for (List tupList : joinArgPerms) { - Map> newReOrder = Maps.newHashMap(reOrder); - newReOrder.put(join, tupList); - getReOrderings(joinsCopy, newReOrder, reOrderings); - } - - return; - - } - - - //creates a map which associates each first join of a TupleExpr join segment with all permutations of - //the non-join nodes after it. 
More specifically, each join is associated with a list of TupleExpr - //lists, where each list represents an ordering of the non-join nodes following the associated join - private static class NodeCollector extends QueryModelVisitorBase { - - private static List filterList; - - public Map>> getPerms() { - return joinArgs; - } - - @Override - public void meet(Join node) { - - filterList = Lists.newArrayList(); - - List args = Lists.newArrayList(); - args = getJoinArgs(node, args); - List> argPerms = Lists.newArrayList(Collections2.permutations(args)); - joinArgs.put(node, argPerms); - filterArgs.put(node, filterList); - - for (TupleExpr te : args) { - if (!(te instanceof StatementPattern) && !(te instanceof ExternalTupleSet)) { - te.visit(this); - } - } - - } - - - //get all non-join nodes below tupleExpr in same join segment - private static List getJoinArgs(TupleExpr tupleExpr, List joinArgs) { - if (tupleExpr instanceof Join) { - if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) - && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs); - getJoinArgs(join.getRightArg(), joinArgs); - } // assumes all filter occur above first join of segment -- - // this should be the state - // after PrecompJoinOptimizer is called - } else if (tupleExpr instanceof Filter) { - filterList.add((Filter) tupleExpr); - getJoinArgs(((Filter) tupleExpr).getArg(), joinArgs); - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - - } - - - - //for a given reOrder map, searches through TupleExpr and places each reordered collection - //of nodes at appropriate join - private static class PermInserter extends QueryModelVisitorBase { - - private Map> reOrderMap = Maps.newHashMap(); - - public void setReOrderMap(Map> reOrderMap) { - this.reOrderMap = reOrderMap; - } - - @Override - public void meet(Join node) { - - List reOrder = reOrderMap.get(node); - if (reOrder != null) { - List filterList = Lists.newArrayList(filterArgs.get(node)); - node.replaceWith(getNewJoin(reOrder, getFilterChain(filterList))); - - for (TupleExpr te : reOrder) { - if (!(te instanceof StatementPattern) && !(te instanceof ExternalTupleSet)) { - te.visit(this); - } - } - } - super.meet(node); - } - } - - - // chain filters together and return front and back of chain - private static List getFilterChain(List filters) { - List filterTopBottom = Lists.newArrayList(); - Filter filterChainTop = null; - Filter filterChainBottom = null; - - for (Filter filter : filters) { - if (filterChainTop == null) { - filterChainTop = filter.clone(); - } else if (filterChainBottom == null) { - filterChainBottom = filter.clone(); - filterChainTop.setArg(filterChainBottom); - } else { - Filter newFilter = filter.clone(); - filterChainBottom.setArg(newFilter); - filterChainBottom = newFilter; - } - } - if (filterChainTop != null) { - filterTopBottom.add(filterChainTop); - } - if (filterChainBottom != null) { - filterTopBottom.add(filterChainBottom); - } - return filterTopBottom; - } - - // build newJoin node given remaining joinArgs and chain of filters - private static TupleExpr getNewJoin(List args, List filterChain) { - TupleExpr newJoin; - List joinArgs = Lists.newArrayList(args); - - if (joinArgs.size() > 1) { - if (filterChain.size() > 0) { - TupleExpr finalJoinArg = joinArgs.remove(0).clone(); - TupleExpr tempJoin; - TupleExpr temp = filterChain.get(0); - - if (joinArgs.size() > 1) { - tempJoin = new Join(joinArgs.remove(0).clone(), 
joinArgs.remove(0).clone()); - for (TupleExpr te : joinArgs) { - tempJoin = new Join(tempJoin, te.clone()); - } - } else { - tempJoin = joinArgs.remove(0).clone(); - } - - if (filterChain.size() == 1) { - ((Filter) temp).setArg(tempJoin); - } else { - ((Filter) filterChain.get(1)).setArg(tempJoin); - } - newJoin = new Join(temp, finalJoinArg); - } else { - newJoin = new Join(joinArgs.remove(0).clone(), joinArgs.remove(0).clone()); - - for (TupleExpr te : joinArgs) { - newJoin = new Join(newJoin, te.clone()); - } - } - } else if (joinArgs.size() == 1) { - if (filterChain.size() > 0) { - newJoin = filterChain.get(0); - if (filterChain.size() == 1) { - ((Filter) newJoin).setArg(joinArgs.get(0).clone()); - } else { - ((Filter) filterChain.get(1)).setArg(joinArgs.get(0).clone()); - } - } else { - newJoin = joinArgs.get(0).clone(); - } - } else { - throw new IllegalStateException("JoinArgs size cannot be zero."); - } - return newJoin; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java deleted file mode 100644 index 4960d78be..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java +++ /dev/null @@ -1,34 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; - -import org.openrdf.query.algebra.TupleExpr; - -public interface TupleValidator { - - public boolean isValid(TupleExpr te); - - public Iterator getValidTuples(Iterator tupleList); - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java deleted file mode 100644 index b3c3fcdc9..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java +++ /dev/null @@ -1,671 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.base.Joiner; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public class ValidIndexCombinationGenerator { - - - private TupleExpr query; - private Set invalidCombos = Sets.newTreeSet(); - private Set spFilterSet; - - - public ValidIndexCombinationGenerator(TupleExpr query) { - this.query = query; - SpFilterCollector sfc = new SpFilterCollector(); - query.visit(sfc); - spFilterSet = sfc.getSpFilterSet(); - } - - - - - public Iterator> getValidIndexCombos(List indexSet) { - - Collections.shuffle(indexSet); - final List list = indexSet; - final Iterator> iter = getValidCombos(list); - - return new Iterator>() { - - private List next = null; - private List nextCombo = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - if (!iter.hasNext()) { - isEmpty = true; - return false; - } else { - nextCombo = iter.next(); - List indexCombo = Lists.newArrayList(); - for (Integer i : nextCombo) { - indexCombo.add(list.get(i)); - } - next = indexCombo; - hasNextCalled = true; - return true; - - } - - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public List next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if(isEmpty) { - throw new NoSuchElementException(); - }else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - } - } - - @Override - public void remove() { - - throw new UnsupportedOperationException("Cannot delete from iterator!"); - - } - - }; - - } - - - - private Iterator> getValidCombos(List indexList) { - - - final List list = indexList; - final int indexSize = list.size(); - final Iterator> iter = getCombos(indexSize); - - - return new Iterator>() { - - private List next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - if (!hasNextCalled && !isEmpty) { - - while (iter.hasNext()) { - List tempNext = iter.next(); - if (isValid(tempNext, list)) { - next = tempNext; - hasNextCalled = true; - return true; - } - - } - - isEmpty = true; - return false; - - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public List next() { - - 
if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - - } - - } - - @Override - public void remove() { - - throw new UnsupportedOperationException("Cannot delete from iterator!"); - - } - - }; - } - - - - - - - private Iterator> getCombos(int indexListSize) { - - final int indexSize = indexListSize; - final int maxSubListSize = spFilterSet.size() / 2; - - return new Iterator>() { - - private List next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - private int subListSize = Math.min(maxSubListSize, indexSize) + 1; - Iterator> subList = null; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - if (subList != null && subList.hasNext()) { - next = subList.next(); - hasNextCalled = true; - return true; - } else { - subListSize--; - if (subListSize == 0) { - isEmpty = true; - return false; - } - subList = getCombos(subListSize, indexSize); - if (subList == null) { - throw new IllegalStateException("Combos cannot be null!"); - } - next = subList.next(); - hasNextCalled = true; - return true; - - } - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public List next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - - } - - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Cannot delete from iterator!"); - } - - }; - - } - - - - private Iterator> getCombos(int subListSize, int indexListSize) { - - if(subListSize > indexListSize) { - throw new IllegalArgumentException("Sublist size must be less than or equal to list size!"); - } - - final int subSize = subListSize; - final int indexSize = indexListSize; - - return new Iterator>() { - - private List next = null; - private List tempList = Lists.newArrayList(); - private boolean calledHasNext = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - - if (!calledHasNext && !isEmpty) { - if (next == null) { - for (int i = 0; i < subSize; i++) { - tempList.add(i); - } - next = tempList; - calledHasNext = true; - return true; - } else { - next = getNext(next, indexSize - 1); - if (next == null) { - isEmpty = true; - return false; - } else { - calledHasNext = true; - return true; - } - - } - } else if(isEmpty) { - return false; - } else { - return true; - } - - } - - @Override - public List next() { - - if (calledHasNext) { - calledHasNext = false; - return next; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - calledHasNext = false; - return next; - } else { - throw new NoSuchElementException(); - } - } - } - @Override - public void remove() { - throw new UnsupportedOperationException(); - - } - - - - }; - } - - - - - - - private List getNext(List prev, int maxInt) { - - List returnList = Lists.newArrayList(); - int size = prev.size(); - int incrementPos = -1; - int incrementVal = 0; - - for(int i = 0; i < size; i++) { - if(prev.get(size-(i+1)) != maxInt - i) { - incrementPos = size - (i+1); - break; - } - } - - if (incrementPos == -1) { - return null; - } else { - - incrementVal = prev.get(incrementPos); - for (int i = 0; i < 
incrementPos; i++) { - returnList.add(prev.get(i)); - } - - for (int j = incrementPos; j < size; j++) { - returnList.add(++incrementVal); - } - - return returnList; - } - } - - - - - private boolean isValid(List combo, List indexList) { - - String s1 = Joiner.on("\u0000").join(combo).trim(); - - if(invalidCombos.contains(s1)) { - return false; - } else { - int valid = indicesDisjoint(combo, indexList); - - if (valid >= 0) { - String s2 = ""; - for (int i = 0; i < valid + 1; i++) { - if (s2.length() == 0) { - s2 = s2 + combo.get(i); - } else { - s2 = s2 + "\u0000" + combo.get(i); - } - } - invalidCombos.add(s2); - - for (int i = valid + 1; i < combo.size(); i++) { - s2 = s2 + "\u0000" + combo.get(i); - invalidCombos.add(s2); - } - - return false; - } else { - return true; - } - } - - - } - - - - private int indicesDisjoint(List combo, List indexList) { - - Set indexNodes = Sets.newHashSet(); - Set tempNodes; - TupleExpr temp; - - - int j = 0; - for(Integer i: combo) { - temp = indexList.get(i).getTupleExpr(); - SpFilterCollector spf = new SpFilterCollector(); - temp.visit(spf); - tempNodes = spf.getSpFilterSet(); - if(Sets.intersection(indexNodes, tempNodes).size() == 0) { - indexNodes = Sets.union(indexNodes, tempNodes); - if(indexNodes.size() > spFilterSet.size()) { - return j; - } - } else { - return j; - } - j++; - } - - return -1; - } - - - - - public static void main(String[] args) { - - - String q1 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?f ?m ." // - + " ?m ?d ." // - + " ?f ?m ." // - + " ?m ?d ." // - + "}";// - - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q5 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q6 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?t ?u ." // - + "}";// - - - - String q8 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." 
// - + "}";// - - - String q9 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - - - - - - - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - ParsedQuery pq7 = null; - ParsedQuery pq8 = null; - ParsedQuery pq9 = null; - - SimpleExternalTupleSet extTup1 = null; - SimpleExternalTupleSet extTup2 = null; - SimpleExternalTupleSet extTup3 = null; - SimpleExternalTupleSet extTup4 = null; - SimpleExternalTupleSet extTup5 = null; - SimpleExternalTupleSet extTup6 = null; - SimpleExternalTupleSet extTup7 = null; - SimpleExternalTupleSet extTup8 = null; - - - - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - pq6 = parser.parseQuery(q6, null); - pq7 = parser.parseQuery(q7, null); - pq8 = parser.parseQuery(q8, null); - pq9 = parser.parseQuery(q9, null); - - - extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - extTup7 = new SimpleExternalTupleSet((Projection) pq8.getTupleExpr()); - extTup8 = new SimpleExternalTupleSet((Projection) pq9.getTupleExpr()); - - - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List indexList = Lists.newArrayList(); - indexList.add(extTup1); - indexList.add(extTup2); - indexList.add(extTup3); - indexList.add(extTup4); - indexList.add(extTup5); - indexList.add(extTup6); - indexList.add(extTup7); - indexList.add(extTup8); - - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> combos = vic.getValidIndexCombos(indexList); - int size = 0; - while(combos.hasNext()) { - combos.hasNext(); - size++; - List eSet = combos.next(); - System.out.println("********************************************"); - for(ExternalTupleSet e: eSet) { - System.out.println(e.getTupleExpr()); - } - System.out.println("********************************************"); - } - - System.out.println("size is " + size + " has next " + combos.hasNext()); - } - - - - - - private static class SpFilterCollector extends QueryModelVisitorBase { - - private Set spFilterSet = Sets.newHashSet(); - - - public int getNodeNumber() { - return spFilterSet.size(); - } - - - public Set getSpFilterSet() { - return spFilterSet; - } - - - @Override - public void meet(StatementPattern node) { - - spFilterSet.add(node); - return; - - } - - - @Override - public void meet(Filter node) { - - spFilterSet.add(node.getCondition()); - node.getArg().visit(this); - } - - - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java deleted file mode 100644 index 7e7282185..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java +++ /dev/null @@ -1,171 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - 
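For reference, the index-combination enumeration deleted above rests on a lexicographic successor function over k-element subsets (the getNext method). A minimal standalone sketch of that logic follows; the class and method names are illustrative only and are not part of Rya:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch of the k-combination successor logic; not Rya API.
public class ComboSuccessorSketch {

    // Returns the next k-combination of {0..maxInt} after prev in
    // lexicographic order, or null when prev is the last combination.
    static List<Integer> next(List<Integer> prev, int maxInt) {
        int size = prev.size();
        int pos = -1;
        // Find the rightmost position that has not yet reached its maximum.
        for (int i = 0; i < size; i++) {
            if (prev.get(size - (i + 1)) != maxInt - i) {
                pos = size - (i + 1);
                break;
            }
        }
        if (pos == -1) {
            return null; // every position is at its maximum; enumeration done
        }
        List<Integer> out = new ArrayList<>(prev.subList(0, pos));
        int val = prev.get(pos);
        for (int j = pos; j < size; j++) {
            out.add(++val); // bump that position, reset the tail consecutively
        }
        return out;
    }

    public static void main(String[] args) {
        // Prints [0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3] for maxInt = 3.
        List<Integer> combo = new ArrayList<>(Arrays.asList(0, 1));
        while (combo != null) {
            System.out.println(combo);
            combo = next(combo, 3);
        }
    }
}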
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.ValueConstant; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.beust.jcommander.internal.Maps; -import com.google.common.collect.Sets; - - - - -public class VarConstantIndexListPruner implements IndexListPruner { - - private Map queryConstantMap; - private int querySpCount; - private int queryFilterCount; - - public VarConstantIndexListPruner(TupleExpr te) { - - ConstantCollector cc = new ConstantCollector(); - te.visit(cc); - this.queryConstantMap = cc.getConstantMap(); - querySpCount = cc.getSpCount(); - queryFilterCount = cc.getFilterCount(); - } - - public Set getRelevantIndices(List indexList) { - - Set relIndexSet = Sets.newHashSet(); - - for (ExternalTupleSet e : indexList) { - - if (isRelevant(e.getTupleExpr())) { - relIndexSet.add(e); - } - - } - - return relIndexSet; - } - - private boolean isRelevant(TupleExpr index) { - - ConstantCollector cc = new ConstantCollector(); - index.visit(cc); - - Map indexConstantMap = cc.getConstantMap(); - int indexSpCount = cc.getSpCount(); - int indexFilterCount = cc.getFilterCount(); - Set indexConstants = indexConstantMap.keySet(); - - if ((indexSpCount > querySpCount) || (indexFilterCount > queryFilterCount) - || !(Sets.intersection(indexConstants, queryConstantMap.keySet()).equals(indexConstants))) { - return false; - } - - for (String s : indexConstants) { - if (indexConstantMap.get(s) > queryConstantMap.get(s)) { - return false; - } - } - - return true; - } - - - private static class ConstantCollector extends QueryModelVisitorBase { - - private Map constantMap = Maps.newHashMap(); - private int spCount = 0; - private int filterCount = 0; - - - @Override - public void meet(StatementPattern node) throws RuntimeException { - - spCount++; - super.meet(node); - - } - - - @Override - public void meet(Filter node) throws RuntimeException { - - filterCount++; - super.meet(node); - - } - - - - - @Override - public void meet(Var node) throws RuntimeException { - - if (node.isConstant()) { - String key = node.getValue().toString(); - if(constantMap.containsKey(key)){ - int count = constantMap.get(key); - count += 1; - constantMap.put(key, count); - } else { - constantMap.put(key, 1); - } - } - - } - - - public void meet(ValueConstant node) throws RuntimeException { - - String key = node.getValue().toString(); - - if(constantMap.containsKey(key)) { - int count = 
constantMap.get(key); - count += 1; - constantMap.put(key, count); - } else { - constantMap.put(key,1); - } - - } - - - public Map getConstantMap() { - return constantMap; - } - - public int getSpCount(){ - return spCount; - } - - - public int getFilterCount() { - return filterCount; - } - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java deleted file mode 100644 index 1d4c4bb76..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java +++ /dev/null @@ -1,94 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Set; - -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import com.google.common.collect.Sets; - -public class IndexingExpr { - - private final URI function; - private final Value[] arguments; - private final StatementPattern spConstraint; - - public IndexingExpr(URI function, StatementPattern spConstraint, Value... arguments) { - this.function = function; - this.arguments = arguments; - this.spConstraint = spConstraint; - } - - public URI getFunction() { - return function; - } - - public Value[] getArguments() { - return arguments; - } - - public StatementPattern getSpConstraint() { - return spConstraint; - } - - - public Set getBindingNames() { - //resource and match variable for search are already included as standard result-bindings - Set bindings = Sets.newHashSet(); - - for(Var v: spConstraint.getVarList()) { - if(!v.isConstant()) { - bindings.add(v.getName()); - } - } - return bindings; - } - - - @Override - public boolean equals(Object other) { - if (!(other instanceof IndexingExpr)) { - return false; - } - IndexingExpr arg = (IndexingExpr) other; - return (this.function.equals(arg.function)) && (this.spConstraint.equals(arg.spConstraint)) - && (this.arguments.equals(arg.arguments)); - } - - - @Override - public int hashCode() { - int result = 17; - result = 31*result + function.hashCode(); - result = 31*result + spConstraint.hashCode(); - result = 31*result + arguments.hashCode(); - - return result; - } - -} - - - diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java deleted file mode 100644 index e96b8a3fa..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java +++ /dev/null @@ -1,136 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
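The relevance test deleted above amounts to a multiset-containment check: an index is kept only if it has no more statement patterns or filters than the query, and every constant it mentions occurs in the query at least as many times. A small sketch of that core check, with illustrative names that are not Rya API:

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the pruner's constant-containment test; not Rya API.
public class ConstantContainmentSketch {

    // True when every constant used by the index occurs at least as often
    // in the query; the real pruner additionally compares the
    // statement-pattern and filter counts before applying this test.
    static boolean constantsContained(Map<String, Integer> indexConstants,
                                      Map<String, Integer> queryConstants) {
        for (Map.Entry<String, Integer> e : indexConstants.entrySet()) {
            if (e.getValue() > queryConstants.getOrDefault(e.getKey(), 0)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Map<String, Integer> query = new HashMap<>();
        query.put("uri:likes", 2);
        query.put("uri:Person", 1);

        Map<String, Integer> index = new HashMap<>();
        index.put("uri:likes", 1);

        System.out.println(constantsContained(index, query)); // true
        index.put("uri:likes", 3);
        System.out.println(constantsContained(index, query)); // false
    }
}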
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.indexing.accumulo.geo.GeoConstants; - -import org.openrdf.model.URI; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.query.algebra.ValueConstant; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; - -import com.google.common.collect.Maps; - -public class IndexingFunctionRegistry { - - - private static final Map SEARCH_FUNCTIONS = Maps.newHashMap(); - - static { - - String TEMPORAL_NS = "tag:rya-rdf.org,2015:temporal#"; - - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"after"),FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"before"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"equals"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"beforeInterval"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"afterInterval"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"insideInterval"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"hasBeginningInterval"), FUNCTION_TYPE.TEMPORAL); - SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"hasEndInterval"), FUNCTION_TYPE.TEMPORAL); - - - SEARCH_FUNCTIONS.put(new URIImpl("http://rdf.useekm.com/fts#text"), FUNCTION_TYPE.FREETEXT); - - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_EQUALS, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_DISJOINT, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_INTERSECTS, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_TOUCHES, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_WITHIN, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_CONTAINS, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_OVERLAPS, FUNCTION_TYPE.GEO); - SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_CROSSES, FUNCTION_TYPE.GEO); - - } - - public enum FUNCTION_TYPE {GEO, TEMPORAL, FREETEXT}; - - - public static Set getFunctions() { - return SEARCH_FUNCTIONS.keySet(); - } - - - public static Var getResultVarFromFunctionCall(URI function, List args) { - - FUNCTION_TYPE type = SEARCH_FUNCTIONS.get(function); - - switch(type) { - case GEO: - return findBinaryResultVar(args); - case FREETEXT: - return findLiteralResultVar(args); - case TEMPORAL: - return findBinaryResultVar(args); - default: - return null; - } - - } - - - public static FUNCTION_TYPE getFunctionType(URI func) { - return SEARCH_FUNCTIONS.get(func); - } - - - - private static boolean isUnboundVariable(ValueExpr expr) { - return expr instanceof Var && !((Var)expr).hasValue(); - } - - private static boolean isConstant(ValueExpr expr) { - return expr instanceof ValueConstant || (expr instanceof Var && ((Var)expr).hasValue()); - } - - - private static 
Var findBinaryResultVar(List args) { - - if (args.size() >= 2) { - ValueExpr arg1 = args.get(0); - ValueExpr arg2 = args.get(1); - if (isUnboundVariable(arg1) && isConstant(arg2)) - return (Var) arg1; - else if (isUnboundVariable(arg2) && isConstant(arg1)) - return (Var) arg2; - } - return null; - } - - - private static Var findLiteralResultVar(List args) { - if (args.size() >= 2) { - ValueExpr arg1 = args.get(0); - ValueExpr arg2 = args.get(1); - if (isUnboundVariable(arg1) && isConstant(arg2)) - return (Var)arg1; - } - return null; - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java deleted file mode 100644 index d61c5ae0a..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java +++ /dev/null @@ -1,159 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.NoSuchElementException; -import java.util.Set; - -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.impl.MapBindingSet; - - -//Given StatementPattern constraint and SearchFunction associated with an Indexing Node, -//creates appropriate StatementConstraints object from StatementPattern constraint and -//binding set and then uses SearchFunction to delegate query to appropriate index. -//Resulting iterator over statements is then converted to an iterator over binding sets -public class IteratorFactory { - - public static CloseableIteration getIterator(final StatementPattern match, - final BindingSet bindings, final String queryText, final SearchFunction searchFunction) { - return new CloseableIteration() { - - private boolean isClosed = false; - private CloseableIteration statementIt = null; - - private String subjectBinding = match.getSubjectVar().getName(); - private String predicateBinding = match.getPredicateVar().getName(); - private String objectBinding = match.getObjectVar().getName(); - private String contextBinding = null; - - private void performQuery() throws QueryEvaluationException { - - StatementContraints contraints = new StatementContraints(); - - // get the context (i.e. 
named graph) of the statement and use that in the query - QueryModelNode parentNode = match.getSubjectVar().getParentNode(); - if (parentNode instanceof StatementPattern) { - StatementPattern parentStatement = (StatementPattern) parentNode; - Var contextVar = parentStatement.getContextVar(); - if (contextVar != null) { - contextBinding = contextVar.getName(); - Resource context = (Resource) contextVar.getValue(); - contraints.setContext(context); - } - } - - // get the subject constraint - if (match.getSubjectVar().isConstant()) { - // get the subject binding from the filter/statement pair - Resource subject = (Resource) match.getSubjectVar().getValue(); - contraints.setSubject(subject); - } else if (bindings.hasBinding(subjectBinding)) { - // get the subject binding from the passed in bindings (eg from other statements/parts of the tree) - Resource subject = (Resource) bindings.getValue(subjectBinding); - contraints.setSubject(subject); - } - - // get the predicate constraint - if (match.getPredicateVar().isConstant()) { - // get the predicate binding from the filter/statement pair - Set predicates = new HashSet(getPredicateRestrictions(match.getPredicateVar())); - contraints.setPredicates(predicates); - } else if (bindings.hasBinding(predicateBinding)) { - // get the predicate binding from the passed in bindings (eg from other statements/parts of the tree) - URI predicateUri = (URI) bindings.getValue(predicateBinding); - Set predicates = Collections.singleton(predicateUri); - contraints.setPredicates(predicates); - } - - statementIt = searchFunction.performSearch(queryText, contraints); - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - if (statementIt == null) { - performQuery(); - } - return statementIt.hasNext(); - } - - @Override - public BindingSet next() throws QueryEvaluationException { - if (!hasNext() || isClosed) { - throw new NoSuchElementException(); - } - - Statement statment = statementIt.next(); - - MapBindingSet bset = new MapBindingSet(); - if (!subjectBinding.startsWith("-const")) - bset.addBinding(subjectBinding, statment.getSubject()); - if (!predicateBinding.startsWith("-const")) - bset.addBinding(predicateBinding, statment.getPredicate()); - if (!objectBinding.startsWith("-const")) - bset.addBinding(objectBinding, statment.getObject()); - if (contextBinding != null && !contextBinding.startsWith("-const")) - bset.addBinding(contextBinding, statment.getContext()); - - // merge with other bindings. - for (String name : bindings.getBindingNames()) { - bset.addBinding(name, bindings.getValue(name)); - } - - return bset; - } - - @Override - public void remove() throws QueryEvaluationException { - throw new UnsupportedOperationException(); - - } - - @Override - public void close() throws QueryEvaluationException { - if (statementIt != null) { - statementIt.close(); - } - isClosed = true; - } - - }; - - } - - public static Collection getPredicateRestrictions(Var predicate) { - if (predicate.hasValue()) - return Collections.singleton((URI) predicate.getValue()); - return Collections.emptyList(); - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java b/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java deleted file mode 100644 index 2caf81c75..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java +++ /dev/null @@ -1,331 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -import mvm.rya.indexing.accumulo.Md5Hash; -import mvm.rya.indexing.accumulo.StatementSerializer; - -import org.apache.accumulo.core.data.Value; -import org.apache.commons.codec.binary.StringUtils; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.URIImpl; - -/** - * Store and format the various temporal index keys. - * Row Keys are in these two forms, where [x] denotes x is optional: - * rowkey = contraintPrefix datetime - * rowkey = datetime 0x/00 uniquesuffix - * contraintPrefix = 0x/00 hash([subject][predicate]) - * uniquesuffix = some bytes to make it unique, like hash(statement). - * - * The instance is in one of two modes depending on the constructor: - * storage mode -- construct with a triple statement, get an iterator of keys to store. - * query mode -- construct with a statement and query constraints, get the key prefix to search. - * - * this has the flavor of an immutable object - * This is independent of the underlying database engine - * - * @author David.Lotts - * - */ -public class KeyParts implements Iterable { - private static final String CQ_S_P_AT = "spo"; - private static final String CQ_P_AT = "po"; - private static final String CQ_S_AT = "so"; - private static final String CQ_O_AT = "o"; - public static final String CQ_BEGIN = "begin"; - public static final String CQ_END = "end"; - - public static final byte[] HASH_PREFIX = new byte[] {0}; - public static final byte[] HASH_PREFIX_FOLLOWING = new byte[] {1}; - - public final Text cf; - public final Text cq; - public final Text constraintPrefix; // subject and/or predicate - final Text storeKey; // subject and/or predicate - final private TemporalInstant instant; - final private Statement statement; - final private boolean queryMode; - KeyParts(Text constraintPrefix, TemporalInstant instant, String cf, String cq) { - this.queryMode = true; // query mode - this.storeKey = null; - this.statement = null; - this.constraintPrefix = constraintPrefix; - this.instant = instant; - this.cf = new Text(cf); - this.cq = new Text(cq); - } - - /** - * this is the value to index. 
- * @return - */ - public Value getValue() { - assert statement!=null; - return new Value(StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - } - - public KeyParts(Statement statement, TemporalInstant instant2) { - this.queryMode = false; // store mode - this.storeKey = null; - this.constraintPrefix = null; - this.statement = statement; - this.instant = instant2; - this.cf = null; - this.cq = null; - } - - private KeyParts(Text keyText, Text cf, Text cq, Statement statement) { - this.queryMode = false; // store mode - this.constraintPrefix = null; - this.statement = statement; - this.instant = null; - this.storeKey = keyText; - this.cf = cf; - this.cq = cq; - } - - @Override - public Iterator iterator() { - final String[] strategies = new String[] { - CQ_O_AT, CQ_S_P_AT, CQ_P_AT, CQ_S_AT - } ; // CQ_END? - assert !queryMode : "iterator for queryMode is not immplemented" ; - if (queryMode) - return null; - - // if (!queryMode) - return new Iterator() { - int nextStrategy = 0; - - @Override - public boolean hasNext() { - return nextStrategy < strategies.length; - } - - @Override - public KeyParts next() { - assert(statement!=null); - Text keyText = new Text(); - // increment++ the next strategy AFTER getting the value - switch (nextStrategy++) { - case 0: // index o+hash(p+s) - assert (CQ_O_AT.equals(strategies[0])); - keyText = new Text(instant.getAsKeyBytes()); - KeyParts.appendUniqueness(statement, keyText); - return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_O_AT), statement); - case 1:// index hash(s+p)+o - assert (CQ_S_P_AT.equals(strategies[1])); - KeyParts.appendSubjectPredicate(statement, keyText); - KeyParts.appendInstant(instant, keyText); - // appendUniqueness -- Not needed since it is already unique. - return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_S_P_AT), statement); - case 2: // index hash(p)+o - assert (CQ_P_AT.equals(strategies[2])); - KeyParts.appendPredicate(statement, keyText); - KeyParts.appendInstant(instant, keyText); - KeyParts.appendUniqueness(statement, keyText); - return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_P_AT), statement); - case 3: // index hash(s)+o - assert (CQ_S_AT.equals(strategies[3])); - KeyParts.appendSubject(statement, keyText); - KeyParts.appendInstant(instant, keyText); - KeyParts.appendUniqueness(statement, keyText); - return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_S_AT), statement); - } - throw new Error("Next passed end? No such nextStrategy="+(nextStrategy-1)); - - } - - @Override - public void remove() { - throw new Error("Remove not Implemented."); - } - }; - } - - public byte[] getStoreKey() { - assert !queryMode : "must be in store Mode, store keys are not initialized."; - return this.storeKey.copyBytes(); - } - - /** - * Query key is the prefix plus the datetime, but no uniqueness at the end. - * @return the row key for range queries. - */ - public Text getQueryKey() { - return getQueryKey(this.instant); - }; - - /** - * Query key is the prefix plus the datetime, but no uniqueness at the end. - * - * @return the row key for range queries. 
- */ - public Text getQueryKey(TemporalInstant theInstant) { - assert queryMode : "must be in query Mode, query keys are not initialized."; - Text keyText = new Text(); - if (constraintPrefix != null) - appendBytes(constraintPrefix.copyBytes(), keyText); - appendInstant(theInstant, keyText); - return keyText; - }; - - @Override - public String toString() { - return "KeyParts [contraintPrefix=" + toHumanString(constraintPrefix) + ", instant=" + toHumanString(instant.getAsKeyBytes()) + ", cf=" + cf + ", cq=" + cq + "]"; - } - private static void appendSubject(Statement statement, Text keyText) { - Value statementValue = new Value(StatementSerializer.writeSubject(statement).getBytes()); - byte[] hashOfValue = uniqueFromValueForKey(statementValue); - appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte. - appendBytes(hashOfValue, keyText); - } - - private static void appendPredicate(Statement statement, Text keyText) { - Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writePredicate(statement))); - byte[] hashOfValue = uniqueFromValueForKey(statementValue); - appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte. - appendBytes(hashOfValue, keyText); - } - - private static void appendInstant(TemporalInstant instant, Text keyText) { - byte[] bytes = instant.getAsKeyBytes(); - appendBytes(bytes, keyText); - } - - private static void appendSubjectPredicate(Statement statement, Text keyText) { - Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writeSubjectPredicate(statement))); - byte[] hashOfValue = uniqueFromValueForKey(statementValue); - appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte. - appendBytes(hashOfValue, keyText); - } - - /** - * Append any byte array to a row key. - * @param bytes append this - * @param keyText text to append to - */ - private static void appendBytes(byte[] bytes, Text keyText) { - keyText.append(bytes, 0, bytes.length); - } - - /** - * Get a collision unlikely hash string and append to the key, - * so that if two keys have the same value, then they will be the same, - * if two different values that occur at the same time there keys are different. - * If the application uses a very large number of statements at the exact same time, - * the md5 value might be upgraded to for example sha-1 to avoid collisions. - * @param statement - * @param keyText - */ - public static void appendUniqueness(Statement statement, Text keyText) { - keyText.append(HASH_PREFIX, 0, 1); // delimiter - Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - byte[] hashOfValue = Md5Hash.md5Binary(statementValue); - keyText.append(hashOfValue, 0, hashOfValue.length); - } - /** - * Get a collision unlikely hash string to append to the key, - * so that if two keys have the same value, then they will be the same, - * if two different values that occur at the same time there keys are different. - * @param value - * @return - */ - private static byte[] uniqueFromValueForKey(Value value) { - return Md5Hash.md5Binary(value); - } - - /** - * List all the index keys to find for any query. Set the strategy via the column qualifier, ex: CQ_S_P_AT. - * Column Family (CF) is the context/named-graph. 
- * @param queryInstant - * @param contraints - * @return - */ - static public List keyPartsForQuery(TemporalInstant queryInstant, StatementContraints contraints) { - List keys = new LinkedList(); - URI urlNull = new URIImpl("urn:null"); - Resource currentContext = contraints.getContext(); - boolean hasSubj = contraints.hasSubject(); - if (contraints.hasPredicates()) { - for (URI nextPredicate : contraints.getPredicates()) { - Text contraintPrefix = new Text(); - Statement statement = new ContextStatementImpl(hasSubj ? contraints.getSubject() : urlNull, nextPredicate, urlNull, contraints.getContext()); - if (hasSubj) - appendSubjectPredicate(statement, contraintPrefix); - else - appendPredicate(statement, contraintPrefix); - keys.add(new KeyParts(contraintPrefix, queryInstant, (currentContext==null)?"":currentContext.toString(), hasSubj?CQ_S_P_AT:CQ_P_AT )); - } - } - else if (contraints.hasSubject()) { // and no predicates - Text contraintPrefix = new Text(); - Statement statement = new StatementImpl(contraints.getSubject(), urlNull, urlNull); - appendSubject(statement, contraintPrefix); - keys.add( new KeyParts(contraintPrefix, queryInstant, (currentContext==null)?"":currentContext.toString(), CQ_S_AT) ); - } - else { - // No constraints except possibly a context/named-graph, handled by the CF - keys.add( new KeyParts(null, queryInstant, (currentContext==null)?"":currentContext.toString(), CQ_O_AT) ); - } - return keys; - } - /** - * convert a non-utf8 byte[] and text and value to string and show unprintable bytes as {xx} where x is hex. - * @param value - * @return Human readable representation. - */ - public static String toHumanString(Value value) { - return toHumanString(value==null?null:value.get()); - } - public static String toHumanString(Text text) { - return toHumanString(text==null?null:text.copyBytes()); - } - public static String toHumanString(byte[] bytes) { - if (bytes==null) - return "{null}"; - StringBuilder sb = new StringBuilder(); - for (byte b : bytes) { - if ((b > 0x7e) || (b < 32)) { - sb.append("{"); - sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones. - sb.append("}"); - } else if (b == '{'||b == '}') { // Escape the literal braces. - sb.append("{"); - sb.append((char)b); - sb.append("}"); - } else - sb.append((char)b); - } - return sb.toString(); - } - - } diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java deleted file mode 100644 index 1aecd98c6..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java +++ /dev/null @@ -1,63 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
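To make the row-key layout documented above concrete, here is a rough sketch of assembling a constrained query prefix: a zero byte, then a hash of the subject/predicate constraint, then the instant's key bytes. It uses plain MessageDigest in place of Rya's Md5Hash helper, and the serialization shown is illustrative only, not the real StatementSerializer output:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Illustrative sketch of the documented key layout; not Rya API.
public class TemporalKeySketch {

    // Layout sketched here: 0x00 | md5(subject + predicate) | instantKeyBytes.
    static byte[] queryPrefix(String subject, String predicate, byte[] instantKeyBytes)
            throws NoSuchAlgorithmException, IOException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] hash = md5.digest((subject + predicate).getBytes(StandardCharsets.UTF_8));

        ByteArrayOutputStream key = new ByteArrayOutputStream();
        key.write(0); // HASH_PREFIX: a single zero byte before the hash
        key.write(hash);
        key.write(instantKeyBytes);
        return key.toByteArray();
    }

    public static void main(String[] args) throws Exception {
        byte[] instant = "2015-12-04T13:42:55.000Z".getBytes(StandardCharsets.US_ASCII);
        byte[] prefix = queryPrefix("urn:subject", "urn:predicate", instant);
        System.out.println("key length: " + prefix.length); // 1 + 16 + instant bytes
    }
}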
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.Closeable; -import java.io.Flushable; -import java.io.IOException; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet.AccValueFactory; - -import org.apache.accumulo.core.client.TableNotFoundException; -import org.openrdf.model.Value; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; - - - -public interface PrecompQueryIndexer extends Closeable, Flushable { - - - public abstract void storeBindingSet(BindingSet bs) throws IOException ; - - public abstract void storeBindingSets(Collection bindingSets) - throws IOException, IllegalArgumentException; - - - public abstract CloseableIteration queryPrecompJoin(List varOrder, - String localityGroup, Map bindings, Map valMap, - Collection constraints) throws QueryEvaluationException,TableNotFoundException; - - - - @Override - public abstract void flush() throws IOException; - - @Override - public abstract void close() throws IOException; -} - - diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java deleted file mode 100644 index 646aab0a1..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java +++ /dev/null @@ -1,84 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.mongodb.MongoDBRdfConfiguration; -import mvm.rya.mongodb.MongoDBRyaDAO; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.hadoop.conf.Configuration; -import org.openrdf.sail.Sail; - -public class RyaSailFactory { - - - - public static Sail getInstance(Configuration conf) throws AccumuloException, - AccumuloSecurityException, RyaDAOException { - - return getRyaSail(conf); - } - - - - private static Sail getRyaSail(Configuration config) throws AccumuloException, AccumuloSecurityException, RyaDAOException { - - RdfCloudTripleStore store = new RdfCloudTripleStore(); - if (ConfigUtils.getUseMongo(config)) { - MongoDBRdfConfiguration conf = new MongoDBRdfConfiguration(config); - conf.setTablePrefix(config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX)); - ConfigUtils.setIndexers(conf); - - MongoDBRyaDAO crdfdao = new MongoDBRyaDAO(conf); - crdfdao.init(); - - conf.setDisplayQueryPlan(true); - store.setRyaDAO(crdfdao); - } else { - Connector connector = ConfigUtils.getConnector(config); - AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); - crdfdao.setConnector(connector); - - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(config); - conf.setTablePrefix(config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX)); // sets - // TablePrefixLayoutStrategy - ConfigUtils.setIndexers(conf); - conf.setDisplayQueryPlan(true); - - crdfdao.setConf(conf); - crdfdao.init(); - store.setRyaDAO(crdfdao); - } - - return store; - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java deleted file mode 100644 index ce94556c2..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java +++ /dev/null @@ -1,45 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; -import org.openrdf.model.Statement; -import org.openrdf.query.QueryEvaluationException; - -/** - * A function used to perform a search. 
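As a usage note for the factory above: the returned Sail is typically wrapped in a Sesame SailRepository to obtain a query connection. A hedged sketch, assuming the Accumulo (or MongoDB) connection settings have already been placed on the configuration; the exact property names are defined in ConfigUtils and omitted here:

import mvm.rya.accumulo.AccumuloRdfConfiguration;
import mvm.rya.indexing.RyaSailFactory;

import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.Sail;

public class RyaSailUsageSketch {
    public static void main(String[] args) throws Exception {
        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix("rya_"); // table prefix drives the layout strategy
        // ... instance name, zookeepers, user, and password also belong here,
        // under the ConfigUtils property keys (not shown) ...

        Sail sail = RyaSailFactory.getInstance(conf);
        SailRepository repo = new SailRepository(sail);
        repo.initialize();
        RepositoryConnection conn = repo.getConnection();
        try {
            // evaluate SPARQL queries or add statements through conn
        } finally {
            conn.close();
            repo.shutDown();
        }
    }
}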
- */ -public interface SearchFunction { - - /** - * Search the indices for the given terms and return {@link Statement}s that meet the {@link StatementContraints} - * - * @param searchTerms - * the search terms - * @param contraints - * the constraints on the returned {@link Statement}s - * @return - * @throws QueryEvaluationException - */ - public abstract CloseableIteration performSearch(String searchTerms, StatementContraints contraints) - throws QueryEvaluationException; - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java deleted file mode 100644 index 719cc2f76..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java +++ /dev/null @@ -1,71 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.Map; - -import org.apache.log4j.Logger; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import com.google.common.collect.Maps; - -public abstract class SearchFunctionFactory { - - private static final Logger logger = Logger.getLogger(SearchFunctionFactory.class); - - private final Map SEARCH_FUNCTION_MAP = Maps.newHashMap(); - - - /** - * Get a {@link GeoSearchFunction} for a give URI. - * - * @param searchFunction - * @return - */ - public SearchFunction getSearchFunction(final URI searchFunction) { - - SearchFunction geoFunc = null; - - try { - geoFunc = getSearchFunctionInternal(searchFunction); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } - - return geoFunc; - } - - private SearchFunction getSearchFunctionInternal(final URI searchFunction) throws QueryEvaluationException { - SearchFunction sf = SEARCH_FUNCTION_MAP.get(searchFunction); - - if (sf != null) { - return sf; - } else { - throw new QueryEvaluationException("Unknown Search Function: " + searchFunction.stringValue()); - } - - - } - - -} - diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java b/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java deleted file mode 100644 index 437c74d07..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java +++ /dev/null @@ -1,73 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Set; - -import org.openrdf.model.Resource; -import org.openrdf.model.URI; - -public class StatementContraints { - private Resource context = null; - private Resource subject = null; - private Set predicates = null; - - public StatementContraints setContext(Resource context) { - this.context = context; - return this; - } - - public StatementContraints setPredicates(Set predicates) { - this.predicates = predicates; - return this; - } - - public StatementContraints setSubject(Resource subject) { - this.subject = subject; - return this; - } - - public Resource getContext() { - return context; - } - - public Set getPredicates() { - return predicates; - } - - public Resource getSubject() { - return subject; - } - - public boolean hasSubject() { - return subject != null; - } - - public boolean hasPredicates() { - return predicates != null && !predicates.isEmpty(); - } - - public boolean hasContext() { - return context != null; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java deleted file mode 100644 index be06e254b..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java +++ /dev/null @@ -1,183 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -/** - * A repository to store, index, and retrieve {@link Statement}s based on time. - * Instants: - * Instant {before, equals, after} Instant - * Instant {before, after, inside} Interval - * Instant {hasBeginning, hasEnd} Interval - * - * OWL-Time provides the interval relations: - *
- * 		intervalEquals, 
- * 		intervalBefore, 
- * 		intervalMeets, 
- * 		intervalOverlaps, 
- * 		intervalStarts, 
- * 		intervalDuring, 
- * 		intervalFinishes, 
- * 
- * and their reverse interval relations: 
- * 		intervalAfter, 
- * 		intervalMetBy, 
- * 		intervalOverlappedBy, 
- * 		intervalStartedBy, 
- * 		intervalContains, 
- * 		intervalFinishedBy.
- * 
- * The relation symbols below are taken from Allen's 1983 paper:
- * 
- * Relation   Symbol (X rel Y)   Inverse (Y rel X)
- * before     X < Y              Y > X
- * equal      X = Y              Y = X
- * meets      X m Y              Y mi X
- * overlaps   X o Y              Y oi X
- * during     X d Y              Y di X
- * starts     X s Y              Y si X
- * finishes   X f Y              Y fi X
- * 
- * - */ - -public interface TemporalIndexer extends RyaSecondaryIndexer { - - /* consider ParseException here */ - - /*- - * - * And Now, what you you've all been waiting for, the queries: - * the instant versions: - * format: x {relation} y - * read: Given literal y, find all statements where the date object x is ( x relation y ) - * Instant {before, equals, after} Instant - * Instant {before, after, inside} Interval - * Instant {hasBeginning, hasEnd} Interval - * - * the Allen interval relations, as described above. - * intervalEquals, - * intervalBefore, - * intervalMeets, - * intervalOverlaps, - * intervalStarts, - * intervalDuring, - * intervalFinishes - * and then the inverses, including after. - */ - - public abstract CloseableIteration queryInstantEqualsInstant( - TemporalInstant queryInstant, StatementContraints contraints) - throws QueryEvaluationException;; - - public abstract CloseableIteration queryInstantBeforeInstant( - TemporalInstant queryInstant, StatementContraints contraints) - throws QueryEvaluationException;; - - public abstract CloseableIteration queryInstantAfterInstant( - TemporalInstant queryInstant, StatementContraints contraints) - throws QueryEvaluationException;; - - public abstract CloseableIteration queryInstantBeforeInterval( - TemporalInterval givenInterval, StatementContraints contraints) - throws QueryEvaluationException;; - - public abstract CloseableIteration queryInstantAfterInterval( - TemporalInterval givenInterval, StatementContraints contraints) - throws QueryEvaluationException; - - public abstract CloseableIteration queryInstantInsideInterval( - TemporalInterval givenInterval, StatementContraints contraints) - throws QueryEvaluationException; - - public abstract CloseableIteration queryInstantHasBeginningInterval( - TemporalInterval queryInterval, StatementContraints contraints) - throws QueryEvaluationException; - - public abstract CloseableIteration queryInstantHasEndInterval( - TemporalInterval queryInterval, StatementContraints contraints) - throws QueryEvaluationException; - - /** - * Returns statements that contain a time instance that is equal to the - * queried time and meet the {@link StatementContraints}. - * - * @param query - * the queried time instance - * @param contraints - * the {@link StatementContraints} - * @return - * @throws QueryEvaluationException - */ - public abstract CloseableIteration queryIntervalEquals( - TemporalInterval query, StatementContraints contraints) - throws QueryEvaluationException; - - /** - * Returns statements that contain a time instances that are before the - * queried {@link TemporalInterval} and meet the {@link StatementContraints} - * - * @param query - * the queried time instance - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration queryIntervalBefore( - TemporalInterval query, StatementContraints contraints) - throws QueryEvaluationException; - - /** - * Returns statements that contain a time instance that is after the queried {@link TemporalInterval} and meet the {@link StatementContraints}. - * - * @param query - * the queried time instance - * @param contraints - * the {@link StatementContraints} - * @return - */ - public abstract CloseableIteration queryIntervalAfter( - TemporalInterval query, StatementContraints contraints) - throws QueryEvaluationException; - - /* End of the Allen algebra queries */ - /** - * @return the set of predicates indexed by the indexer. 
- */ - public abstract Set getIndexablePredicates(); - - @Override - public abstract void flush() throws IOException; - - @Override - public abstract void close() throws IOException; -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java deleted file mode 100644 index f4e6d95f4..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java +++ /dev/null @@ -1,83 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.Serializable; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -/** - * Time and date interface for building intervals. - * - *Implementations: - * Implementation should have a factory method for TemporalInterval since TemporalIntervals reference only this - * interface for begin & end, so it injects an implementation. - * public static TemporalInterval parseInterval(String dateTimeInterval) - * - * The following are notes and may not have been implemented. - * - * = rfc3339 - *https://www.ietf.org/rfc/rfc3339.txt - * a subset of ISO-8601 - * YYYY-MM-DDThh:mm:ss.fffZ - * Limits: - *All dates and times are assumed to be in the "current era", - somewhere between 0000AD and 9999AD. - * resolution: to the second, or millisecond if the optional fraction is used. - * - * = epoch - * 32bit or 64bit integer specifying the number of seconds since a standard date-time (1970) - * 32bit is good until 2038. - * 64bit is good until after the heat death of our universe - * - */ -public interface TemporalInstant extends Comparable, Serializable { - @Override - public boolean equals(Object obj) ; - - @Override - public int compareTo(TemporalInstant o) ; - - @Override - public int hashCode() ; - /** - * Get the date as a byte array. - */ - public byte[] getAsKeyBytes(); - /** - * Get the date as a String. - */ - public String getAsKeyString(); - /** - * Get the date as a human readable for reporting with timeZone. - */ - public String getAsReadable(DateTimeZone tz); - /** - * Get the date as a human readable for reporting, timeZone is implementation specific. - */ - public String getAsReadable(); - /** - * Get the date as a Joda/Java v8 DateTime. - */ - public DateTime getAsDateTime(); - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java deleted file mode 100644 index b23b99cb1..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java +++ /dev/null @@ -1,181 +0,0 @@ -package mvm.rya.indexing; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.UnsupportedEncodingException; - -/** - * A time with beginning and end date and time, which could be indefinitely in - * the past or future. Immutable, so it's thread safe. For use in reading and - * writing from Rya's temporal indexing scheme. - * - */ -public class TemporalInterval implements Comparable { - - // the beginning and end. Read-only because they are final references to immutable objects. - private final TemporalInstant hasBeginning; - private final TemporalInstant hasEnd; - - /** - * Separate the beginning and end with this. - * Used because Joda time library's interval uses this. - * TODO: Move this down to the TemporalInterval implementation. - * TODO: Then add a TemporalInterval.keyConcatenate(). - */ - public static final String DELIMITER = "/"; - -// /** -// * Empty constructor -- not allowed, no defaults. -// * For an infinite span of time: do it like this: -// * new TemporalInterval(TemporalInstantImpl.getMinimum, TemporalInstantImpl.getMaximum) -// */ -// public TemporalInterval() { -// hasBeginning = null; -// hasEnd = null; -// } - - /** - * Constructor setting beginning and end with an implementation of {@link TemporalInstant}. - * beginning must be less than end. - * - * @param hasBeginning - * @param hasEnd - */ - public TemporalInterval(TemporalInstant hasBeginning, TemporalInstant hasEnd) { - super(); - if (hasBeginning != null && hasEnd != null && 0 < hasBeginning.compareTo(hasEnd)) - throw new IllegalArgumentException("The Beginning instance must not compare greater than the end."); - this.hasBeginning = hasBeginning; - this.hasEnd = hasEnd; - } - - /** - * @return the hasBeginning - */ - public TemporalInstant getHasBeginning() { - return hasBeginning; - } - - /** - * @return the hasEnd - */ - public TemporalInstant getHasEnd() { - return hasEnd; - } - - /** - * True if CompareTo() says equal (0) - */ - @Override - public boolean equals(Object other) { - return other instanceof TemporalInterval - && this.compareTo((TemporalInterval) other) == 0; - }; - - /** - * Compare beginnings, if the same then compare ends, or equal if beginnings equal and endings equal. - * Nulls represent infinity. - */ - @Override - public int compareTo(TemporalInterval other) { - int compBegins = this.hasBeginning.compareTo(other.hasBeginning); - if (0 == compBegins) - return this.hasEnd.compareTo(other.hasEnd); - else - return compBegins; - - } - - /** - * Hashcode for - */ - @Override - public int hashCode() { - if (hasBeginning == null) - if (hasEnd == null) - return 0; - else - return hasEnd.hashCode(); - else - return hashboth(this.hasBeginning.hashCode(), - this.hasEnd.hashCode()); - } - - /** - * Hashcode combining two string hashcodes. 
-     */
-    protected static int hashboth(int i1, int i2) {
-        // return (int) ((1L * i1 * i2) % (1L + Integer.MAX_VALUE));
-        // Let the overflow happen; it won't throw an error.
-        return (i1 + i2);
-    }
-
-    /**
-     * Get the key used as the row id for the beginning of the interval.  Use ASCII
-     * for the conversion to catch and prevent multi-byte chars.
-     *
-     * @return the interval as "begin/end" key bytes
-     */
-    public byte[] getAsKeyBeginning() {
-        try {
-            return (hasBeginning.getAsKeyString() + DELIMITER + hasEnd
-                    .getAsKeyString()).getBytes("US-ASCII");
-        } catch (UnsupportedEncodingException e) {
-            // This is a code error; the strings are mostly numbers.
-            throw new Error("while converting key string to ascii bytes", e);
-        }
-    }
-
-    /**
-     * Get the key used for indexing the end of the interval.  Use ASCII for
-     * the conversion to catch and prevent multi-byte chars.
-     *
-     * @return the interval as "end/begin" key bytes
-     */
-    public byte[] getAsKeyEnd() {
-        try {
-            return (hasEnd.getAsKeyString() + DELIMITER + hasBeginning
-                    .getAsKeyString()).getBytes("US-ASCII");
-        } catch (UnsupportedEncodingException e) {
-            // This is a code error; the strings are mostly numbers and ascii
-            // symbols.
-            throw new Error("while converting key string to ascii bytes", e);
-        }
-    }
-
-    /**
-     * Format as a "period" as described in this paper (not a formal standard):
-     * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.298.8948&rep=rep1&type=pdf
-     * Also consider using the typed literal syntax:
-     * "[2010-01-01,2010-01-31]"^^xs:period
-     * @return [begindate,enddate], for example: [2010-01-01,2010-01-31]
-     */
-    public String getAsPair() {
-        return "[" + hasBeginning.getAsReadable() + "," + hasEnd.getAsReadable() + "]";
-    }
-
-    @Override
-    public String toString() {
-        return getAsPair();
-        // return hasBeginning.getAsReadable() + DELIMITER + hasEnd.getAsReadable();
-    }
-}
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
deleted file mode 100644
index ae16062b5..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
+++ /dev/null
@@ -1,424 +0,0 @@
-package mvm.rya.indexing.accumulo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
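To make the two key layouts above concrete: for an interval [b, e], getAsKeyBeginning() yields "b/e" (so rows sort by start) while getAsKeyEnd() yields "e/b" (so rows sort by end), meaning one interval is written under two row ids. A small illustrative snippet, reusing the hypothetical Rfc3339Instant sketch from earlier (the dates are placeholders):

    TemporalInstant begin = new Rfc3339Instant(new DateTime("2010-01-01T00:00:00Z"));
    TemporalInstant end   = new Rfc3339Instant(new DateTime("2010-01-31T00:00:00Z"));
    TemporalInterval interval = new TemporalInterval(begin, end);

    // Row id for start-ordered scans: "2010-01-01T00:00:00.000Z/2010-01-31T00:00:00.000Z"
    byte[] byStart = interval.getAsKeyBeginning();
    // Row id for end-ordered scans:   "2010-01-31T00:00:00.000Z/2010-01-01T00:00:00.000Z"
    byte[] byEnd = interval.getAsKeyEnd();
    // Human-readable "period" form, e.g. [2010-01-01T00:00:00.000Z,2010-01-31T00:00:00.000Z]
    System.out.println(interval.getAsPair());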
- */ - - - -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.FilterFunctionOptimizer; -import mvm.rya.indexing.accumulo.entity.EntityCentricIndex; -import mvm.rya.indexing.accumulo.entity.EntityOptimizer; -import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer; -import mvm.rya.indexing.accumulo.freetext.LuceneTokenizer; -import mvm.rya.indexing.accumulo.freetext.Tokenizer; -import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer; -import mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer; -import mvm.rya.indexing.external.PrecompJoinOptimizer; -import mvm.rya.indexing.mongodb.MongoGeoIndexer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.MultiTableBatchWriter; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.util.ReflectionUtils; -import org.apache.log4j.Logger; -import org.openrdf.model.URI; -import org.openrdf.model.impl.URIImpl; - -import com.google.common.collect.Lists; - -/** - * A set of configuration utils to read a Hadoop {@link Configuration} object and create Cloudbase/Accumulo objects. 
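As a usage sketch of the connection-related keys defined just below (the instance, ZooKeeper, and credential values here are placeholders, not taken from this patch):

    // Hypothetical values; getConnector throws AccumuloException / AccumuloSecurityException.
    Configuration conf = new Configuration();
    conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "myInstance");
    conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, "zoo1:2181,zoo2:2181");
    conf.set(ConfigUtils.CLOUDBASE_USER, "root");
    conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "secret");
    // For tests, a mock instance can stand in for a live cluster:
    // conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
    Connector connector = ConfigUtils.getConnector(conf);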
- */ -public class ConfigUtils { - private static final Logger logger = Logger.getLogger(ConfigUtils.class); - - public static final String CLOUDBASE_TBL_PREFIX = "sc.cloudbase.tableprefix"; - public static final String CLOUDBASE_AUTHS = "sc.cloudbase.authorizations"; - public static final String CLOUDBASE_INSTANCE = "sc.cloudbase.instancename"; - public static final String CLOUDBASE_ZOOKEEPERS = "sc.cloudbase.zookeepers"; - public static final String CLOUDBASE_USER = "sc.cloudbase.username"; - public static final String CLOUDBASE_PASSWORD = "sc.cloudbase.password"; - - public static final String CLOUDBASE_WRITER_MAX_WRITE_THREADS = "sc.cloudbase.writer.maxwritethreads"; - public static final String CLOUDBASE_WRITER_MAX_LATENCY = "sc.cloudbase.writer.maxlatency"; - public static final String CLOUDBASE_WRITER_MAX_MEMORY = "sc.cloudbase.writer.maxmemory"; - - public static final String FREE_TEXT_QUERY_TERM_LIMIT = "sc.freetext.querytermlimit"; - - public static final String FREE_TEXT_DOC_TABLENAME = "sc.freetext.doctable"; - public static final String FREE_TEXT_TERM_TABLENAME = "sc.freetext.termtable"; - public static final String GEO_TABLENAME = "sc.geo.table"; - public static final String GEO_NUM_PARTITIONS = "sc.geo.numPartitions"; - public static final String TEMPORAL_TABLENAME = "sc.temporal.index"; - public static final String ENTITY_TABLENAME = "sc.entity.index"; - - public static final String USE_GEO = "sc.use_geo"; - public static final String USE_FREETEXT = "sc.use_freetext"; - public static final String USE_TEMPORAL = "sc.use_temporal"; - public static final String USE_ENTITY = "sc.use_entity"; - public static final String USE_PCJ = "sc.use_pcj"; - public static final String USE_OPTIMAL_PCJ = "sc.use.optimal.pcj"; - - public static final String USE_INDEXING_SAIL = "sc.use.indexing.sail"; - public static final String USE_EXTERNAL_SAIL = "sc.use.external.sail"; - - public static final String USE_MOCK_INSTANCE = ".useMockInstance"; - - public static final String NUM_PARTITIONS = "sc.cloudbase.numPartitions"; - - private static final int WRITER_MAX_WRITE_THREADS = 1; - private static final long WRITER_MAX_LATNECY = Long.MAX_VALUE; - private static final long WRITER_MAX_MEMORY = 10000L; - - public static final String DISPLAY_QUERY_PLAN = "query.printqueryplan"; - - public static final String FREETEXT_PREDICATES_LIST = "sc.freetext.predicates"; - public static final String FREETEXT_DOC_NUM_PARTITIONS = "sc.freetext.numPartitions.text"; - public static final String FREETEXT_TERM_NUM_PARTITIONS = "sc.freetext.numPartitions.term"; - - public static final String TOKENIZER_CLASS = "sc.freetext.tokenizer.class"; - - public static final String GEO_PREDICATES_LIST = "sc.geo.predicates"; - - public static final String TEMPORAL_PREDICATES_LIST = "sc.temporal.predicates"; - - public static final String USE_MONGO = "sc.useMongo"; - - public static boolean isDisplayQueryPlan(Configuration conf){ - return conf.getBoolean(DISPLAY_QUERY_PLAN, false); - } - - /** - * get a value from the configuration file and throw an exception if the value does not exist. 
- * - * @param conf - * @param key - * @return - */ - private static String getStringCheckSet(Configuration conf, String key) { - String value = conf.get(key); - Validate.notNull(value, key + " not set"); - return value; - } - - /** - * @param conf - * @param tablename - * @return if the table was created - * @throws AccumuloException - * @throws AccumuloSecurityException - * @throws TableExistsException - */ - public static boolean createTableIfNotExists(Configuration conf, String tablename) throws AccumuloException, AccumuloSecurityException, - TableExistsException { - TableOperations tops = getConnector(conf).tableOperations(); - if (!tops.exists(tablename)) { - logger.info("Creating table: " + tablename); - tops.create(tablename); - return true; - } - return false; - } - - private static String getIndexTableName(Configuration conf, String indexTableNameConf, String altSuffix){ - String value = conf.get(indexTableNameConf); - if (value == null){ - String defaultTableName = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX); - Validate.notNull(defaultTableName, indexTableNameConf + " not set and " + RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX + " not set. Cannot generate table name."); - value = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX) + altSuffix; - } - return value; - } - - public static String getFreeTextDocTablename(Configuration conf) { - return getIndexTableName(conf, FREE_TEXT_DOC_TABLENAME, "freetext"); - } - - public static String getFreeTextTermTablename(Configuration conf) { - return getIndexTableName(conf, FREE_TEXT_TERM_TABLENAME, "freetext_term"); - } - - public static int getFreeTextTermLimit(Configuration conf) { - return conf.getInt(FREE_TEXT_QUERY_TERM_LIMIT, 100); - } - - public static String getGeoTablename(Configuration conf) { - return getIndexTableName(conf, GEO_TABLENAME, "geo"); - } - - public static String getTemporalTableName(Configuration conf) { - return getIndexTableName(conf, TEMPORAL_TABLENAME, "temporal"); - } - - - public static String getEntityTableName(Configuration conf) { - return getIndexTableName(conf, ENTITY_TABLENAME, "entity"); - } - - - public static Set getFreeTextPredicates(Configuration conf) { - return getPredicates(conf, FREETEXT_PREDICATES_LIST); - } - - public static Set getGeoPredicates(Configuration conf) { - return getPredicates(conf, GEO_PREDICATES_LIST); - } - /** - * Used for indexing statements about date & time instances and intervals. - * @param conf - * @return Set of predicate URI's whose objects should be date time literals. 
- */ - public static Set getTemporalPredicates(Configuration conf) { - return getPredicates(conf, TEMPORAL_PREDICATES_LIST); - } - - private static Set getPredicates(Configuration conf, String confName) { - String[] validPredicateStrings = conf.getStrings(confName, new String[] {}); - Set predicates = new HashSet(); - for (String prediateString : validPredicateStrings) { - predicates.add(new URIImpl(prediateString)); - } - return predicates; - } - - public static Tokenizer getFreeTextTokenizer(Configuration conf) { - Class c = conf.getClass(TOKENIZER_CLASS, LuceneTokenizer.class, Tokenizer.class); - return ReflectionUtils.newInstance(c, conf); - } - - public static BatchWriter createDefaultBatchWriter(String tablename, Configuration conf) throws TableNotFoundException, - AccumuloException, AccumuloSecurityException { - Long DEFAULT_MAX_MEMORY = getWriterMaxMemory(conf); - Long DEFAULT_MAX_LATENCY = getWriterMaxLatency(conf); - Integer DEFAULT_MAX_WRITE_THREADS = getWriterMaxWriteThreads(conf); - Connector connector = ConfigUtils.getConnector(conf); - return connector.createBatchWriter(tablename, DEFAULT_MAX_MEMORY, DEFAULT_MAX_LATENCY, DEFAULT_MAX_WRITE_THREADS); - } - - public static MultiTableBatchWriter createMultitableBatchWriter(Configuration conf) throws AccumuloException, AccumuloSecurityException { - Long DEFAULT_MAX_MEMORY = getWriterMaxMemory(conf); - Long DEFAULT_MAX_LATENCY = getWriterMaxLatency(conf); - Integer DEFAULT_MAX_WRITE_THREADS = getWriterMaxWriteThreads(conf); - Connector connector = ConfigUtils.getConnector(conf); - return connector.createMultiTableBatchWriter(DEFAULT_MAX_MEMORY, DEFAULT_MAX_LATENCY, DEFAULT_MAX_WRITE_THREADS); - } - - public static Scanner createScanner(String tablename, Configuration conf) throws AccumuloException, AccumuloSecurityException, - TableNotFoundException { - Connector connector = ConfigUtils.getConnector(conf); - Authorizations auths = ConfigUtils.getAuthorizations(conf); - return connector.createScanner(tablename, auths); - - } - - public static BatchScanner createBatchScanner(String tablename, Configuration conf) throws AccumuloException, AccumuloSecurityException, - TableNotFoundException { - Connector connector = ConfigUtils.getConnector(conf); - Authorizations auths = ConfigUtils.getAuthorizations(conf); - Integer numThreads = null; - if (conf instanceof RdfCloudTripleStoreConfiguration) - numThreads = ((RdfCloudTripleStoreConfiguration) conf).getNumThreads(); - else - numThreads = conf.getInt(RdfCloudTripleStoreConfiguration.CONF_NUM_THREADS, 2); - return connector.createBatchScanner(tablename, auths, numThreads); - } - - public static int getWriterMaxWriteThreads(Configuration conf) { - return conf.getInt(CLOUDBASE_WRITER_MAX_WRITE_THREADS, WRITER_MAX_WRITE_THREADS); - } - - public static long getWriterMaxLatency(Configuration conf) { - return conf.getLong(CLOUDBASE_WRITER_MAX_LATENCY, WRITER_MAX_LATNECY); - } - - public static long getWriterMaxMemory(Configuration conf) { - return conf.getLong(CLOUDBASE_WRITER_MAX_MEMORY, WRITER_MAX_MEMORY); - } - - public static String getUsername(JobContext job) { - return getUsername(job.getConfiguration()); - } - - public static String getUsername(Configuration conf) { - return conf.get(CLOUDBASE_USER); - } - - public static Authorizations getAuthorizations(JobContext job) { - return getAuthorizations(job.getConfiguration()); - } - - public static Authorizations getAuthorizations(Configuration conf) { - String authString = conf.get(CLOUDBASE_AUTHS, ""); - if (authString.isEmpty()) { - 
return new Authorizations(); - } - return new Authorizations(authString.split(",")); - } - - public static Instance getInstance(JobContext job) { - return getInstance(job.getConfiguration()); - } - - public static Instance getInstance(Configuration conf) { - if (useMockInstance(conf)) { - return new MockInstance(conf.get(CLOUDBASE_INSTANCE)); - } - return new ZooKeeperInstance(conf.get(CLOUDBASE_INSTANCE), conf.get(CLOUDBASE_ZOOKEEPERS)); - } - - public static String getPassword(JobContext job) { - return getPassword(job.getConfiguration()); - } - - public static String getPassword(Configuration conf) { - return conf.get(CLOUDBASE_PASSWORD, ""); - } - - public static Connector getConnector(JobContext job) throws AccumuloException, AccumuloSecurityException { - return getConnector(job.getConfiguration()); - } - - public static Connector getConnector(Configuration conf) throws AccumuloException, AccumuloSecurityException { - Instance instance = ConfigUtils.getInstance(conf); - - return instance.getConnector(getUsername(conf), getPassword(conf)); - } - - public static boolean useMockInstance(Configuration conf) { - return conf.getBoolean(USE_MOCK_INSTANCE, false); - } - - private static int getNumPartitions(Configuration conf) { - return conf.getInt(NUM_PARTITIONS, 25); - } - - public static int getFreeTextDocNumPartitions(Configuration conf) { - return conf.getInt(FREETEXT_DOC_NUM_PARTITIONS, getNumPartitions(conf)); - } - - public static int getFreeTextTermNumPartitions(Configuration conf) { - return conf.getInt(FREETEXT_TERM_NUM_PARTITIONS, getNumPartitions(conf)); - } - - public static int getGeoNumPartitions(Configuration conf) { - return conf.getInt(GEO_NUM_PARTITIONS, getNumPartitions(conf)); - } - - public static boolean getUseGeo(Configuration conf) { - return conf.getBoolean(USE_GEO, false); - } - - public static boolean getUseFreeText(Configuration conf) { - return conf.getBoolean(USE_FREETEXT, false); - } - - public static boolean getUseTemporal(Configuration conf) { - return conf.getBoolean(USE_TEMPORAL, false); - } - - public static boolean getUseEntity(Configuration conf) { - return conf.getBoolean(USE_ENTITY, false); - } - - public static boolean getUsePCJ(Configuration conf) { - return conf.getBoolean(USE_PCJ, false); - } - - public static boolean getUseOptimalPCJ(Configuration conf) { - return conf.getBoolean(USE_OPTIMAL_PCJ, false); - } - - public static boolean getUseMongo(Configuration conf) { - return conf.getBoolean(USE_MONGO, false); - } - - - public static void setIndexers(RdfCloudTripleStoreConfiguration conf) { - - List indexList = Lists.newArrayList(); - List optimizers = Lists.newArrayList(); - - boolean useFilterIndex = false; - - if (ConfigUtils.getUseMongo(conf)) { - if (getUseGeo(conf)) { - indexList.add(MongoGeoIndexer.class.getName()); - useFilterIndex = true; - } - } else { - - if (getUsePCJ(conf) || getUseOptimalPCJ(conf)) { - conf.setPcjOptimizer(PrecompJoinOptimizer.class); - } - - if (getUseGeo(conf)) { - indexList.add(GeoMesaGeoIndexer.class.getName()); - useFilterIndex = true; - } - - if (getUseFreeText(conf)) { - indexList.add(AccumuloFreeTextIndexer.class.getName()); - useFilterIndex = true; - } - - if (getUseTemporal(conf)) { - indexList.add(AccumuloTemporalIndexer.class.getName()); - useFilterIndex = true; - } - - } - - if (useFilterIndex) { - optimizers.add(FilterFunctionOptimizer.class.getName()); - } - - if (getUseEntity(conf)) { - indexList.add(EntityCentricIndex.class.getName()); - optimizers.add(EntityOptimizer.class.getName()); - - } - - 
conf.setStrings(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS, indexList.toArray(new String[]{})); - conf.setStrings(AccumuloRdfConfiguration.CONF_OPTIMIZERS, optimizers.toArray(new String[]{})); - - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java deleted file mode 100644 index 8fa300867..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java +++ /dev/null @@ -1,45 +0,0 @@ -package mvm.rya.indexing.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.apache.accumulo.core.data.Value; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.codec.binary.StringUtils; -import org.apache.commons.codec.digest.DigestUtils; - -/** - * Utility methods for generating hashes. Note that MD5 is 16 bytes, or 32 Hex chars. To make it smaller (but still printable), this class - * Base64 encodes those 16 bytes into 22 chars. - */ -public class Md5Hash { - public static String md5Base64(byte[] data) { - return Base64.encodeBase64URLSafeString(DigestUtils.md5(data)); - } - - public static String md5Base64(String string) { - return md5Base64(StringUtils.getBytesUtf8(string)); - } - - public static byte[] md5Binary(Value value) { - return DigestUtils.md5(value.get()); - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java deleted file mode 100644 index f5d6d0e09..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java +++ /dev/null @@ -1,227 +0,0 @@ -package mvm.rya.indexing.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
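Tying the flags and setIndexers together, a sketch of enabling two of the secondary indexes; the table prefix, flag choices, and predicate URI are illustrative:

    AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
    conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "rya_");
    conf.setBoolean(ConfigUtils.USE_TEMPORAL, true);
    conf.setBoolean(ConfigUtils.USE_FREETEXT, true);
    conf.setStrings(ConfigUtils.TEMPORAL_PREDICATES_LIST, "http://example.com/hasDate");
    ConfigUtils.setIndexers(conf);
    // Per the branches above, this registers AccumuloTemporalIndexer and
    // AccumuloFreeTextIndexer and adds the FilterFunctionOptimizer.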
- */ - - - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.indexing.StatementContraints; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; -import org.openrdf.model.Literal; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -/** - * A set of Utilities to serialize {@link Statement}s to/from {@link String}s. - */ -public class StatementSerializer { - private static String SEP = "\u0000"; - - private static ValueFactory VALUE_FACTORY = new ValueFactoryImpl(); - - /** - * Read a {@link Statement} from a {@link String} - * - * @param in - * the {@link String} to parse - * @return a {@link Statement} - */ - public static Statement readStatement(String in) throws IOException { - String[] parts = in.split(SEP); - - if (parts.length != 4) { - throw new IOException("Not a valid statement: " + in); - } - - String contextString = parts[0]; - String subjectString = parts[1]; - String predicateString = parts[2]; - String objectString = parts[3]; - return readStatement(subjectString, predicateString, objectString, contextString); - } - - public static Statement readStatement(String subjectString, String predicateString, String objectString) { - return readStatement(subjectString, predicateString, objectString, ""); - } - - public static Statement readStatement(String subjectString, String predicateString, String objectString, String contextString) { - Resource subject = createResource(subjectString); - URI predicate = VALUE_FACTORY.createURI(predicateString); - - boolean isObjectLiteral = objectString.startsWith("\""); - - Value object = null; - if (isObjectLiteral) { - object = parseLiteral(objectString); - } else { - object = createResource(objectString); - } - - if (contextString == null || contextString.isEmpty()) { - return new StatementImpl(subject, predicate, object); - } else { - Resource context = VALUE_FACTORY.createURI(contextString); - return new ContextStatementImpl(subject, predicate, object, context); - } - } - - private static Resource createResource(String str) { - if (str.startsWith("_")) { - return VALUE_FACTORY.createBNode(str.substring(2)); - } - return VALUE_FACTORY.createURI(str); - - } - - private static Literal parseLiteral(String fullLiteralString) { - Validate.notNull(fullLiteralString); - Validate.isTrue(fullLiteralString.length() > 1); - - if (fullLiteralString.endsWith("\"")) { - String fullLiteralWithoutQuotes = fullLiteralString.substring(1, fullLiteralString.length() - 1); - return VALUE_FACTORY.createLiteral(fullLiteralWithoutQuotes, (String) null); - } else { - - // find the closing quote - int labelEnd = fullLiteralString.lastIndexOf("\""); - - String label = fullLiteralString.substring(1, labelEnd); - - String data = fullLiteralString.substring(labelEnd + 1); - - if (data.startsWith("@")) { - // the data is "language" - String lang = data.substring(1); - return VALUE_FACTORY.createLiteral(label, lang); - } else if (data.startsWith("^^<")) { - // the data is a "datatype" - String datatype = data.substring(3, data.length() - 1); - URI datatypeUri = VALUE_FACTORY.createURI(datatype); - return VALUE_FACTORY.createLiteral(label, datatypeUri); - } - } - return null; - - } - - public static String writeSubject(Statement statement) { - return 
statement.getSubject().toString(); - } - - public static String writeObject(Statement statement) { - return statement.getObject().toString(); - } - - public static String writePredicate(Statement statement) { - return statement.getPredicate().toString(); - } - - public static String writeSubjectPredicate(Statement statement) { - Validate.notNull(statement); - Validate.notNull(statement.getSubject()); - Validate.notNull(statement.getPredicate()); - return statement.getSubject().toString() + SEP + statement.getPredicate().toString(); - } - - public static String writeContext(Statement statement) { - if (statement.getContext() == null) { - return ""; - } - return statement.getContext().toString(); - } - - /** - * Write a {@link Statement} to a {@link String} - * - * @param statement - * the {@link Statement} to write - * @return a {@link String} representation of the statement - */ - public static String writeStatement(Statement statement) { - Resource subject = statement.getSubject(); - Resource context = statement.getContext(); - URI predicate = statement.getPredicate(); - Value object = statement.getObject(); - - Validate.notNull(subject); - Validate.notNull(predicate); - Validate.notNull(object); - - String s = ""; - if (context == null) { - s = SEP + subject.toString() + SEP + predicate.toString() + SEP + object.toString(); - } else { - s = context.toString() + SEP + subject.toString() + SEP + predicate.toString() + SEP + object.toString(); - } - return s; - } - - /** - * Creates a Regular Expression to match serialized statements meeting these constraints. A null or empty parameters imply - * no constraint. A null return value implies no constraints. - * - * @param context - * context constraint - * @param subject - * subject constraint - * @param predicates - * list of predicate constraints - * @return a regular expression that can be used to match serialized statements. A null return value implies no - * constraints. - */ - public static String createStatementRegex(StatementContraints contraints) { - Resource context = contraints.getContext(); - Resource subject = contraints.getSubject(); - Set predicates = contraints.getPredicates(); - if (context == null && subject == null && (predicates == null || predicates.isEmpty())) { - return null; - } - - // match on anything but a separator - String anyReg = "[^" + SEP + "]*"; - - // if context is empty, match on any context - String contextReg = (context == null) ? anyReg : context.stringValue(); - - // if subject is empty, match on any subject - String subjectReg = (subject == null) ? anyReg : subject.stringValue(); - - // if the predicates are empty, match on any predicate. Otherwise, "or" the predicates. - String predicateReg = ""; - if (predicates == null || predicates.isEmpty()) { - predicateReg = anyReg; - } else { - predicateReg = "(" + StringUtils.join(predicates, "|") + ")"; - } - - return "^" + contextReg + SEP + subjectReg + SEP + predicateReg + SEP + ".*"; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java deleted file mode 100644 index feb894f2b..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java +++ /dev/null @@ -1,450 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
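A round-trip sketch for the serializer above (the URIs are illustrative; the separator is the non-printable \u0000 byte, so serialized statements sort correctly but do not print cleanly):

    ValueFactory vf = new ValueFactoryImpl();
    Statement stmt = new StatementImpl(
            vf.createURI("http://example.com/alice"),
            vf.createURI("http://example.com/knows"),
            vf.createURI("http://example.com/bob"));

    // With a null context: "" SEP subject SEP predicate SEP object
    String flat = StatementSerializer.writeStatement(stmt);
    // readStatement throws IOException if the string does not split into 4 parts.
    Statement back = StatementSerializer.readStatement(flat);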
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE; -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map.Entry; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.documentIndex.DocIndexIteratorUtil; -import mvm.rya.accumulo.documentIndex.DocumentIndexIntersectingIterator; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.api.resolver.RyaTypeResolverException; -import mvm.rya.indexing.DocIdIndexer; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.base.Preconditions; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Sets; -import com.google.common.primitives.Bytes; - -public class AccumuloDocIdIndexer implements DocIdIndexer { - - - - private BatchScanner bs; - private AccumuloRdfConfiguration conf; - - public AccumuloDocIdIndexer(RdfCloudTripleStoreConfiguration conf) throws AccumuloException, AccumuloSecurityException { - Preconditions.checkArgument(conf instanceof RdfCloudTripleStoreConfiguration, "conf must be isntance of RdfCloudTripleStoreConfiguration"); - this.conf = (AccumuloRdfConfiguration) conf; - //Connector conn = ConfigUtils.getConnector(conf); - } - - - - - public CloseableIteration queryDocIndex(String 
sparqlQuery, - Collection constraints) throws TableNotFoundException, QueryEvaluationException { - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - try { - pq1 = parser.parseQuery(sparqlQuery, null); - } catch (MalformedQueryException e) { - e.printStackTrace(); - } - - TupleExpr te1 = pq1.getTupleExpr(); - List spList1 = StatementPatternCollector.process(te1); - - if(StarQuery.isValidStarQuery(spList1)) { - StarQuery sq1 = new StarQuery(spList1); - return queryDocIndex(sq1, constraints); - } else { - throw new IllegalArgumentException("Invalid star query!"); - } - - } - - - - - @Override - public CloseableIteration queryDocIndex(StarQuery query, - Collection constraints) throws TableNotFoundException, QueryEvaluationException { - - final StarQuery starQ = query; - final Iterator bs = constraints.iterator(); - final Iterator bs2 = constraints.iterator(); - final Set unCommonVarNames; - final Set commonVarNames; - if (bs2.hasNext()) { - BindingSet currBs = bs2.next(); - commonVarNames = StarQuery.getCommonVars(query, currBs); - unCommonVarNames = Sets.difference(currBs.getBindingNames(), commonVarNames); - } else { - commonVarNames = Sets.newHashSet(); - unCommonVarNames = Sets.newHashSet(); - } - - if( commonVarNames.size() == 1 && !query.commonVarConstant() && commonVarNames.contains(query.getCommonVarName())) { - - final HashMultimap map = HashMultimap.create(); - final String commonVar = starQ.getCommonVarName(); - final Iterator> intersections; - final BatchScanner scan; - Set ranges = Sets.newHashSet(); - - while(bs.hasNext()) { - - BindingSet currentBs = bs.next(); - - if(currentBs.getBinding(commonVar) == null) { - continue; - } - - String row = currentBs.getBinding(commonVar).getValue().stringValue(); - ranges.add(new Range(row)); - map.put(row, currentBs); - - } - scan = runQuery(starQ, ranges); - intersections = scan.iterator(); - - - return new CloseableIteration() { - - - private QueryBindingSet currentSolutionBs = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - private Iterator inputSet = (new ArrayList()).iterator(); - private BindingSet currentBs; - private Key key; - - - - @Override - public boolean hasNext() throws QueryEvaluationException { - if (!hasNextCalled && !isEmpty) { - while (inputSet.hasNext() || intersections.hasNext()) { - if (!inputSet.hasNext()) { - key = intersections.next().getKey(); - inputSet = map.get(key.getRow().toString()).iterator(); - } - currentBs = inputSet.next(); - currentSolutionBs = deserializeKey(key, starQ, currentBs, unCommonVarNames); - - if (currentSolutionBs.size() == unCommonVarNames.size() + starQ.getUnCommonVars().size() +1) { - hasNextCalled = true; - return true; - } - - } - - isEmpty = true; - return false; - - } else if (isEmpty) { - return false; - } else { - return true; - } - - } - - - @Override - public BindingSet next() throws QueryEvaluationException { - - if (hasNextCalled) { - hasNextCalled = false; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - } else { - throw new NoSuchElementException(); - } - } - - return currentSolutionBs; - } - - @Override - public void remove() throws QueryEvaluationException { - throw new UnsupportedOperationException(); - } - - @Override - public void close() throws QueryEvaluationException { - scan.close(); - } - - }; - - - } else { - - return new CloseableIteration() { - - @Override - public void remove() throws QueryEvaluationException { - throw new 
UnsupportedOperationException(); - } - - private Iterator> intersections = null; - private QueryBindingSet currentSolutionBs = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - private boolean init = false; - private BindingSet currentBs; - private StarQuery sq = new StarQuery(starQ); - private Set emptyRangeSet = Sets.newHashSet(); - private BatchScanner scan; - - @Override - public BindingSet next() throws QueryEvaluationException { - if (hasNextCalled) { - hasNextCalled = false; - } else if (isEmpty) { - throw new NoSuchElementException(); - } else { - if (this.hasNext()) { - hasNextCalled = false; - } else { - throw new NoSuchElementException(); - } - } - return currentSolutionBs; - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - - if (!init) { - if (intersections == null && bs.hasNext()) { - currentBs = bs.next(); - sq = StarQuery.getConstrainedStarQuery(sq, currentBs); - scan = runQuery(sq,emptyRangeSet); - intersections = scan.iterator(); - // binding set empty - } else if (intersections == null && !bs.hasNext()) { - currentBs = new QueryBindingSet(); - scan = runQuery(starQ,emptyRangeSet); - intersections = scan.iterator(); - } - - init = true; - } - - if (!hasNextCalled && !isEmpty) { - while (intersections.hasNext() || bs.hasNext()) { - if (!intersections.hasNext()) { - scan.close(); - currentBs = bs.next(); - sq = StarQuery.getConstrainedStarQuery(sq, currentBs); - scan = runQuery(sq,emptyRangeSet); - intersections = scan.iterator(); - } - if (intersections.hasNext()) { - currentSolutionBs = deserializeKey(intersections.next().getKey(), sq, currentBs, - unCommonVarNames); - } else { - continue; - } - - if (sq.commonVarConstant() && currentSolutionBs.size() == unCommonVarNames.size() + sq.getUnCommonVars().size()) { - hasNextCalled = true; - return true; - } else if(currentSolutionBs.size() == unCommonVarNames.size() + sq.getUnCommonVars().size() + 1) { - hasNextCalled = true; - return true; - } - } - - isEmpty = true; - return false; - - } else if (isEmpty) { - return false; - } else { - return true; - } - } - - @Override - public void close() throws QueryEvaluationException { - scan.close(); - } - }; - } - } - - private QueryBindingSet deserializeKey(Key key, StarQuery sq, BindingSet currentBs, Set unCommonVar) { - - - QueryBindingSet currentSolutionBs = new QueryBindingSet(); - - Text row = key.getRow(); - Text cq = key.getColumnQualifier(); - - - String[] cqArray = cq.toString().split(DocIndexIteratorUtil.DOC_ID_INDEX_DELIM); - - boolean commonVarSet = false; - - //if common Var is constant there is no common variable to assign a value to - if(sq.commonVarConstant()) { - commonVarSet = true; - } - - if (!commonVarSet && sq.isCommonVarURI()) { - RyaURI rURI = new RyaURI(row.toString()); - currentSolutionBs.addBinding(sq.getCommonVarName(), - RyaToRdfConversions.convertValue(rURI)); - commonVarSet = true; - } - - for (String s : sq.getUnCommonVars()) { - - byte[] cqBytes = cqArray[sq.getVarPos().get(s)].getBytes(); - int firstIndex = Bytes.indexOf(cqBytes, DELIM_BYTE); - int secondIndex = Bytes.lastIndexOf(cqBytes, DELIM_BYTE); - int typeIndex = Bytes.indexOf(cqBytes, TYPE_DELIM_BYTE); - byte[] tripleComponent = Arrays.copyOfRange(cqBytes, firstIndex + 1, secondIndex); - byte[] cqContent = Arrays.copyOfRange(cqBytes, secondIndex + 1, typeIndex); - byte[] objType = Arrays.copyOfRange(cqBytes, typeIndex, cqBytes.length); - - if ((new String(tripleComponent)).equals("object")) { - byte[] object = 
Bytes.concat(cqContent, objType); - org.openrdf.model.Value v = null; - try { - v = RyaToRdfConversions.convertValue(RyaContext.getInstance().deserialize( - object)); - } catch (RyaTypeResolverException e) { - e.printStackTrace(); - } - currentSolutionBs.addBinding(s, v); - - } else if ((new String(tripleComponent)).equals("subject")) { - if (!commonVarSet) { - byte[] object = Bytes.concat(row.getBytes(), objType); - org.openrdf.model.Value v = null; - try { - v = RyaToRdfConversions.convertValue(RyaContext.getInstance().deserialize( - object)); - } catch (RyaTypeResolverException e) { - e.printStackTrace(); - } - currentSolutionBs.addBinding(sq.getCommonVarName(), v); - commonVarSet = true; - } - RyaURI rURI = new RyaURI(new String(cqContent)); - currentSolutionBs.addBinding(s, RyaToRdfConversions.convertValue(rURI)); - } else { - throw new IllegalArgumentException("Invalid row."); - } - } - for (String s : unCommonVar) { - currentSolutionBs.addBinding(s, currentBs.getValue(s)); - } - return currentSolutionBs; - } - - private BatchScanner runQuery(StarQuery query, Collection ranges) throws QueryEvaluationException { - - try { - if (ranges.size() == 0) { - String rangeText = query.getCommonVarValue(); - Range r; - if (rangeText != null) { - r = new Range(new Text(query.getCommonVarValue())); - } else { - r = new Range(); - } - ranges = Collections.singleton(r); - } - - Connector accCon = ConfigUtils.getConnector(conf); - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, query.getColumnCond()); - - if(query.hasContext()) { - DocumentIndexIntersectingIterator.setContext(is, query.getContextURI()); - } - bs = accCon.createBatchScanner(ConfigUtils.getEntityTableName(conf), - new Authorizations(conf.get(ConfigUtils.CLOUDBASE_AUTHS)), 15); - bs.addScanIterator(is); - bs.setRanges(ranges); - - return bs; - - } catch (TableNotFoundException e) { - e.printStackTrace(); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - throw new QueryEvaluationException(); - } - - - @Override - public void close() throws IOException { - //TODO generate an exception when BS passed in -- scanner closed -// if (bs != null) { -// bs.close(); -// } - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java deleted file mode 100644 index b8b3f659a..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java +++ /dev/null @@ -1,252 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
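Both anonymous CloseableIteration implementations above, and the String iterator in EntityLocalityGroupSetter later in this patch, repeat the same look-ahead idiom (the hasNextCalled/isEmpty flags). A standalone sketch of that pattern, detached from the Accumulo types; the class name is mine, not from this patch:

    // hasNext() computes and caches the next element; next() consumes the cache
    // or forces a look-ahead.  remove() is unsupported, as in the code above.
    abstract class LookAheadIterator<T> implements java.util.Iterator<T> {
        private T next;             // cached element, valid only when primed
        private boolean primed;     // hasNext() succeeded and next is unconsumed
        private boolean exhausted;  // underlying source is done

        /** Produce the next element, or null when the source is exhausted. */
        protected abstract T computeNext();

        @Override public boolean hasNext() {
            if (primed) return true;
            if (exhausted) return false;
            next = computeNext();
            if (next == null) { exhausted = true; return false; }
            primed = true;
            return true;
        }
        @Override public T next() {
            if (!hasNext()) throw new java.util.NoSuchElementException();
            primed = false;
            return next;
        }
        @Override public void remove() { throw new UnsupportedOperationException(); }
    }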
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV; -import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES; -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES; -import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT; - -import java.io.IOException; -import java.util.Collection; -import java.util.List; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.accumulo.experimental.AccumuloIndexer; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.MultiTableBatchWriter; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; -import org.openrdf.model.Statement; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.evaluation.impl.BindingAssigner; -import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer; -import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter; -import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer; -import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer; -import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer; -import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer; -import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer; -import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer; -import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.primitives.Bytes; - -public class EntityCentricIndex extends AbstractAccumuloIndexer { - - private static final Logger logger = Logger.getLogger(EntityCentricIndex.class); - private static final String TABLE_SUFFIX = "EntityCentricIndex"; - - private AccumuloRdfConfiguration conf; - private BatchWriter writer; - private boolean isInit = false; - - public static final String CONF_TABLE_SUFFIX = "ac.indexer.eci.tablename"; - - - private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, IOException, - TableExistsException { - ConfigUtils.createTableIfNotExists(conf, ConfigUtils.getEntityTableName(conf)); - } - - - @Override - public Configuration getConf() { - return this.conf; - } - - //initialization occurs in setConf because 
index is created using reflection - @Override - public void setConf(Configuration conf) { - if (conf instanceof AccumuloRdfConfiguration) { - this.conf = (AccumuloRdfConfiguration) conf; - } else { - this.conf = new AccumuloRdfConfiguration(conf); - } - if (!isInit) { - try { - init(); - isInit = true; - } catch (AccumuloException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (AccumuloSecurityException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (TableNotFoundException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (TableExistsException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (IOException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } - } - } - - - @Override - public String getTableName() { - return ConfigUtils.getEntityTableName(conf); - } - - @Override - public void setMultiTableBatchWriter(MultiTableBatchWriter writer) throws IOException { - try { - this.writer = writer.getBatchWriter(getTableName()); - } catch (AccumuloException e) { - throw new IOException(e); - } catch (AccumuloSecurityException e) { - throw new IOException(e); - } catch (TableNotFoundException e) { - throw new IOException(e); - } - - } - - - public void storeStatement(RyaStatement stmt) throws IOException { - Preconditions.checkNotNull(writer, "BatchWriter not Set"); - try { - for (TripleRow row : serializeStatement(stmt)) { - writer.addMutation(createMutation(row)); - } - } catch (MutationsRejectedException e) { - throw new IOException(e); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } - } - - - public void deleteStatement(RyaStatement stmt) throws IOException { - Preconditions.checkNotNull(writer, "BatchWriter not Set"); - try { - for (TripleRow row : serializeStatement(stmt)) { - writer.addMutation(deleteMutation(row)); - } - } catch (MutationsRejectedException e) { - throw new IOException(e); - } catch (RyaTypeResolverException e) { - throw new IOException(e); - } - } - - - protected Mutation deleteMutation(TripleRow tripleRow) { - Mutation m = new Mutation(new Text(tripleRow.getRow())); - - byte[] columnFamily = tripleRow.getColumnFamily(); - Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily); - - byte[] columnQualifier = tripleRow.getColumnQualifier(); - Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier); - - m.putDelete(cfText, cqText, new ColumnVisibility(tripleRow.getColumnVisibility()), tripleRow.getTimestamp()); - return m; - } - - public static Collection createMutations(RyaStatement stmt) throws RyaTypeResolverException{ - Collection m = Lists.newArrayList(); - for (TripleRow tr : serializeStatement(stmt)){ - m.add(createMutation(tr)); - } - return m; - } - - private static Mutation createMutation(TripleRow tripleRow) { - Mutation mutation = new Mutation(new Text(tripleRow.getRow())); - byte[] columnVisibility = tripleRow.getColumnVisibility(); - ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility); - Long timestamp = tripleRow.getTimestamp(); - byte[] value = tripleRow.getValue(); - Value v = value == null ? 
EMPTY_VALUE : new Value(value); - byte[] columnQualifier = tripleRow.getColumnQualifier(); - Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier); - byte[] columnFamily = tripleRow.getColumnFamily(); - Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily); - - mutation.put(cfText, cqText, cv, timestamp, v); - return mutation; - } - - private static List serializeStatement(RyaStatement stmt) throws RyaTypeResolverException { - RyaURI subject = stmt.getSubject(); - RyaURI predicate = stmt.getPredicate(); - RyaType object = stmt.getObject(); - RyaURI context = stmt.getContext(); - Long timestamp = stmt.getTimestamp(); - byte[] columnVisibility = stmt.getColumnVisibility(); - byte[] value = stmt.getValue(); - assert subject != null && predicate != null && object != null; - byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes(); - byte[] subjBytes = subject.getData().getBytes(); - byte[] predBytes = predicate.getData().getBytes(); - byte[][] objBytes = RyaContext.getInstance().serializeType(object); - - return Lists.newArrayList(new TripleRow(subjBytes, // - predBytes, // - Bytes.concat(cf, DELIM_BYTES, // - "object".getBytes(), DELIM_BYTES, // - objBytes[0], objBytes[1]), // - timestamp, // - columnVisibility, // - value// - ), - - new TripleRow(objBytes[0], // - predBytes, // - Bytes.concat(cf, DELIM_BYTES, // - "subject".getBytes(), DELIM_BYTES, // - subjBytes, objBytes[1]), // - timestamp, // - columnVisibility, // - value// - )); - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java deleted file mode 100644 index 2030e587e..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java +++ /dev/null @@ -1,171 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
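To make serializeStatement above concrete: each RyaStatement (subject, predicate, object) is written twice, once keyed by subject and once by the object's value bytes, so that a scan over either entity's row finds all statements touching it. Schematically, with "|" standing for DELIM_BYTES and the object's type marker appended last:

    row = subject bytes       CF = predicate bytes   CQ = context | "object"  | object value bytes + object type bytes
    row = object value bytes  CF = predicate bytes   CQ = context | "subject" | subject bytes      + object type bytes

(The timestamp, column visibility, and value are carried through unchanged on both rows.)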
- */ - - -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map.Entry; -import java.util.NoSuchElementException; -import java.util.Set; -import mvm.rya.indexing.accumulo.ConfigUtils; -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; - -public class EntityLocalityGroupSetter { - - - String tablePrefix; - Connector conn; - Configuration conf; - - public EntityLocalityGroupSetter(String tablePrefix, Connector conn, Configuration conf) { - this.conn = conn; - this.tablePrefix = tablePrefix; - this.conf = conf; - } - - - - private Iterator getPredicates() { - - String auths = conf.get(ConfigUtils.CLOUDBASE_AUTHS); - BatchScanner bs = null; - try { - bs = conn.createBatchScanner(tablePrefix + "prospects", new Authorizations(auths), 10); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - bs.setRanges(Collections.singleton(Range.prefix(new Text("predicate" + "\u0000")))); - final Iterator> iter = bs.iterator(); - - return new Iterator() { - - private String next = null; - private boolean hasNextCalled = false; - private boolean isEmpty = false; - - @Override - public boolean hasNext() { - - if (!hasNextCalled && !isEmpty) { - while (iter.hasNext()) { - Entry temp = iter.next(); - String row = temp.getKey().getRow().toString(); - String[] rowArray = row.split("\u0000"); - next = rowArray[1]; - - hasNextCalled = true; - return true; - } - isEmpty = true; - return false; - } else if(isEmpty) { - return false; - }else { - return true; - } - } - - @Override - public String next() { - - if (hasNextCalled) { - hasNextCalled = false; - return next; - } else if(isEmpty) { - throw new NoSuchElementException(); - }else { - if (this.hasNext()) { - hasNextCalled = false; - return next; - } else { - throw new NoSuchElementException(); - } - } - } - - @Override - public void remove() { - - throw new UnsupportedOperationException("Cannot delete from iterator!"); - - } - - }; - } - - - - - - - - - public void setLocalityGroups() { - - HashMap> localityGroups = new HashMap>(); - Iterator groups = getPredicates(); - - int i = 1; - - while(groups.hasNext()) { - HashSet tempColumn = new HashSet(); - String temp = groups.next(); - tempColumn.add(new Text(temp)); - String groupName = "predicate" + i; - localityGroups.put(groupName, tempColumn); - i++; - } - - - try { - conn.tableOperations().setLocalityGroups(tablePrefix + "doc_partitioned_index", localityGroups); - //conn.tableOperations().compact(tablePrefix + "doc_partitioned_index", null, null, true, true); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - - - - } - - - - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java deleted file mode 100644 index e46c321c2..000000000 --- 
a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java +++ /dev/null @@ -1,436 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public class EntityOptimizer implements QueryOptimizer, Configurable { - - private SelectivityEvalDAO eval; - private RdfCloudTripleStoreConfiguration conf; - private boolean isEvalDaoSet = false; - - - public EntityOptimizer() { - - } - - public EntityOptimizer(RdfCloudTripleStoreConfiguration conf) { - if(conf.isUseStats() && conf.isUseSelectivity()) { - try { - eval = new AccumuloSelectivityEvalDAO(conf, ConfigUtils.getConnector(conf)); - ((AccumuloSelectivityEvalDAO)eval).setRdfEvalDAO(new ProspectorServiceEvalStatsDAO(ConfigUtils.getConnector(conf), conf)); - eval.init(); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - - isEvalDaoSet = true; - } else { - eval = null; - isEvalDaoSet = true; - } - this.conf = conf; - } - - public EntityOptimizer(SelectivityEvalDAO eval) { - this.eval = eval; - this.conf = eval.getConf(); - isEvalDaoSet = true; - } - - @Override - public void setConf(Configuration conf) { - if(conf instanceof RdfCloudTripleStoreConfiguration) { - this.conf = (RdfCloudTripleStoreConfiguration) conf; - } else { - this.conf = new 
AccumuloRdfConfiguration(conf); - } - - if (!isEvalDaoSet) { - if(this.conf.isUseStats() && this.conf.isUseSelectivity()) { - try { - eval = new AccumuloSelectivityEvalDAO(this.conf, ConfigUtils.getConnector(this.conf)); - ((AccumuloSelectivityEvalDAO)eval).setRdfEvalDAO(new ProspectorServiceEvalStatsDAO(ConfigUtils.getConnector(this.conf), this.conf)); - eval.init(); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - - isEvalDaoSet = true; - } else { - eval = null; - isEvalDaoSet = true; - } - } - - } - - @Override - public Configuration getConf() { - return conf; - } - - /** - * Applies generally applicable optimizations: path expressions are sorted - * from more to less specific. - * - * @param tupleExpr - */ - @Override - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - tupleExpr.visit(new JoinVisitor()); - } - - protected class JoinVisitor extends QueryModelVisitorBase { - - @Override - public void meet(Join node) { - try { - if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) { - return; - } - List joinArgs = getJoinArgs(node, new ArrayList()); - HashMultimap varMap = getVarBins(joinArgs); - while (!varMap.keySet().isEmpty()) { - String s = getHighestPriorityKey(varMap); - constructTuple(varMap, joinArgs, s); - } - List filterChain = getFilterChain(joinArgs); - - for (TupleExpr te : joinArgs) { - if (!(te instanceof StatementPattern) || !(te instanceof EntityTupleSet)) { - te.visit(this); - } - } - // Replace old join hierarchy - node.replaceWith(getNewJoin(joinArgs, filterChain)); - - } catch (Exception e) { - e.printStackTrace(); - } - } - - private List getFilterChain(List joinArgs) { - List filterTopBottom = Lists.newArrayList(); - TupleExpr filterChainTop = null; - TupleExpr filterChainBottom = null; - - for(int i = 0; i < joinArgs.size(); i++) { - if(joinArgs.get(i) instanceof Filter) { - if(filterChainTop == null) { - filterChainTop = joinArgs.remove(i); - i--; - } else if(filterChainBottom == null){ - filterChainBottom = joinArgs.remove(i); - ((Filter)filterChainTop).setArg(filterChainBottom); - i--; - } else { - ((Filter)filterChainBottom).setArg(joinArgs.remove(i)); - filterChainBottom = ((Filter)filterChainBottom).getArg(); - i--; - } - } - } - if(filterChainTop != null) { - filterTopBottom.add(filterChainTop); - } - if(filterChainBottom != null) { - filterTopBottom.add(filterChainBottom); - } - return filterTopBottom; - } - - private TupleExpr getNewJoin(List joinArgs, List filterChain) { - TupleExpr newJoin; - - if (joinArgs.size() > 1) { - if (filterChain.size() > 0) { - TupleExpr finalJoinArg = joinArgs.remove(0); - TupleExpr tempJoin; - TupleExpr temp = filterChain.get(0); - - if (joinArgs.size() > 1) { - tempJoin = new Join(joinArgs.remove(0), joinArgs.remove(0)); - for (TupleExpr te : joinArgs) { - tempJoin = new Join(tempJoin, te); - } - } else { - tempJoin = joinArgs.remove(0); - } - - if (filterChain.size() == 1) { - ((Filter) temp).setArg(tempJoin); - } else { - ((Filter) filterChain.get(1)).setArg(tempJoin); - } - newJoin = new Join(temp, finalJoinArg); - } else { - newJoin = new Join(joinArgs.get(0), joinArgs.get(1)); - joinArgs.remove(0); - joinArgs.remove(0); - - for (TupleExpr te : joinArgs) { - newJoin = new Join(newJoin, te); - } - } - } else if (joinArgs.size() == 1) { - if (filterChain.size() > 0) { - newJoin = filterChain.get(0); - if (filterChain.size() == 1) { - ((Filter) 
newJoin).setArg(joinArgs.get(0)); - } else { - ((Filter) filterChain.get(1)).setArg(joinArgs.get(0)); - } - } else { - newJoin = joinArgs.get(0); - } - } else { - throw new IllegalStateException("JoinArgs size cannot be zero."); - } - return newJoin; - } - - private HashMultimap getVarBins(List nodes) { - - HashMultimap varMap = HashMultimap.create(); - - for (QueryModelNode node : nodes) { - if (node instanceof StatementPattern) { - StatementPattern sp = (StatementPattern) node; - if (sp.getPredicateVar().isConstant()) { - varMap.put(sp.getSubjectVar().getName(), sp); - varMap.put(sp.getObjectVar().getName(), sp); - } - } - } - - removeInvalidBins(varMap, true); - - return varMap; - } - - private void updateVarMap(HashMultimap varMap, Set bin) { - - for (StatementPattern sp : bin) { - varMap.remove(sp.getSubjectVar().getName(), sp); - varMap.remove(sp.getObjectVar().getName(), sp); - } - - removeInvalidBins(varMap, false); - - } - - private void removeInvalidBins(HashMultimap varMap, boolean newMap) { - - Set keys = Sets.newHashSet(varMap.keySet()); - - if (newMap) { - for (String s : keys) { - Set spSet = Sets.newHashSet(varMap.get(s)); - if (!StarQuery.isValidStarQuery(spSet)) { - for (StatementPattern sp : spSet) { - varMap.remove(s, sp); - } - } - - } - } else { - - for (String s : keys) { - Set spSet = Sets.newHashSet(varMap.get(s)); - if (spSet.size() == 1) { - for (StatementPattern sp : spSet) { - varMap.remove(s, sp); - } - } - - } - } - - } - - private void constructTuple(HashMultimap varMap, List joinArgs, - String binName) { - - Set bin = Sets.newHashSet(varMap.get(binName)); - StarQuery sq = new StarQuery(bin); - - updateVarMap(varMap, bin); - for (StatementPattern sp : bin) { - joinArgs.remove(sp); - } - - joinArgs.add(new EntityTupleSet(sq, conf)); - - } - - private String getHighestPriorityKey(HashMultimap varMap) { - - double tempPriority = -1; - double priority = -Double.MAX_VALUE; - String priorityKey = ""; - Set bin = null; - - Set keys = varMap.keySet(); - - for (String s : keys) { - bin = varMap.get(s); - tempPriority = bin.size(); - tempPriority *= getCardinality(bin); - tempPriority *= getMinCardSp(bin); - - // weight starQuery where common Var is constant slightly more -- this factor is subject - // to change - if(s.startsWith("-const-")) { - tempPriority *= 10; - } - if (tempPriority > priority) { - priority = tempPriority; - priorityKey = s; - } - } - return priorityKey; - } - - private double getMinCardSp(Collection nodes) { - - double cardinality = Double.MAX_VALUE; - double tempCard = -1; - - if (eval == null) { - return 1; - } - - for (StatementPattern sp : nodes) { - - try { - tempCard = eval.getCardinality(conf, sp); - - if (tempCard < cardinality) { - cardinality = tempCard; - - } - } catch (Exception e) { - e.printStackTrace(); - } - - } - - return cardinality; - - } - - private double getCardinality(Collection spNodes) { - - double cardinality = Double.MAX_VALUE; - double tempCard = -1; - - - if(eval == null) { - return 1; - } - - List nodes = Lists.newArrayList(spNodes); - - AccumuloSelectivityEvalDAO ase = (AccumuloSelectivityEvalDAO) eval; - ase.setDenormalized(true); - - try { - - for (int i = 0; i < nodes.size(); i++) { - for (int j = i + 1; j < nodes.size(); j++) { - tempCard = ase.getJoinSelect(conf, nodes.get(i), nodes.get(j)); - if (tempCard < cardinality) { - cardinality = tempCard; - } - } - } - - } catch (Exception e) { - e.printStackTrace(); - } - - ase.setDenormalized(false); - - return cardinality / (nodes.size() + 1); - - } - - protected 
> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) { - if (tupleExpr instanceof Join) { - if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) - && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs); - getJoinArgs(join.getRightArg(), joinArgs); - } - } else if(tupleExpr instanceof Filter) { - joinArgs.add(tupleExpr); - getJoinArgs(((Filter)tupleExpr).getArg(), joinArgs); - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - - } - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java deleted file mode 100644 index dbe7a536e..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java +++ /dev/null @@ -1,264 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.entity.StarQuery.CardinalityStatementPattern; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import mvm.rya.rdftriplestore.RdfCloudTripleStoreConnection; -import mvm.rya.rdftriplestore.evaluation.ExternalBatchingIterator; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.commons.io.IOUtils; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.evaluation.impl.ExternalSet; -import org.openrdf.sail.SailException; - -import com.beust.jcommander.internal.Sets; -import com.google.common.base.Joiner; - -public class EntityTupleSet extends ExternalSet implements ExternalBatchingIterator { - - - private StarQuery starQuery; - private RdfCloudTripleStoreConfiguration conf; - private Set variables; - private double cardinality = -1; - private StatementPattern minSp; - private double minCard; - private Connector accCon = null; - private boolean 
evalOptUsed = false; - - public EntityTupleSet() { - - } - - public EntityTupleSet(StarQuery sq, RdfCloudTripleStoreConfiguration conf) { - this.starQuery = sq; - this.conf = conf; - - variables = Sets.newHashSet(); - if(!starQuery.commonVarConstant()) { - variables.add(starQuery.getCommonVarName()); - } - variables.addAll(starQuery.getUnCommonVars()); - - init(); - - } - - public EntityTupleSet(StarQuery sq, RdfCloudTripleStoreConfiguration conf, boolean evalOptUsed) { - this(sq,conf); - this.evalOptUsed = evalOptUsed; - } - - private void init() { - - try { - accCon = ConfigUtils.getConnector(conf); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - if (conf.isUseStats() && conf.isUseSelectivity()) { - - ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(accCon, conf); - evalDao.init(); - AccumuloSelectivityEvalDAO ase = new AccumuloSelectivityEvalDAO(conf, accCon); - ase.setRdfEvalDAO(evalDao); - ase.init(); - - cardinality = starQuery.getCardinality(ase); - CardinalityStatementPattern csp = starQuery.getMinCardSp(ase); - - minCard = csp.getCardinality(); - minSp = csp.getSp(); - } else { - // TODO come up with a better default if cardinality is not - // initialized - cardinality = minCard = 1; - minSp = starQuery.getNodes().get(0); - } - - } - - @Override - public Set getBindingNames() { - return starQuery.getBindingNames(); - } - - @Override - public Set getAssuredBindingNames() { - return starQuery.getAssuredBindingNames(); - } - - public Set getVariables() { - return variables; - } - - - @Override - public String getSignature() { - return "(EntityCentric Projection) " + " common Var: " + starQuery.getCommonVarName() + " variables: " + Joiner.on(", ").join(variables).replaceAll("\\s+", " "); - } - - public StarQuery getStarQuery() { - return starQuery; - } - - public void setStarQuery(StarQuery sq) { - this.starQuery = sq; - } - - - @Override - public EntityTupleSet clone() { - StarQuery sq = new StarQuery(starQuery); - return new EntityTupleSet(sq, conf); - } - - - @Override - public double cardinality() { - return cardinality; - } - - - public double getMinSpCard() { - return minCard; - } - - - @Override - public CloseableIteration evaluate(BindingSet bindings) throws QueryEvaluationException { - - // if starQuery contains node with cardinality less than 1000 and node - // only has one variable, and number of SPs in starQuery is greater than 2, it is - // more efficient to first evaluate this node and then pass the bindings - // into the remainder of the star query to be evaluated - if (minCard < 1000 && starQuery.size() > 2 && numberOfSpVars(minSp) == 1 && !starQuery.commonVarConstant()) { - - try { - RdfCloudTripleStoreConnection conn = getRyaSailConnection(); - CloseableIteration sol = (CloseableIteration) conn - .evaluate(minSp, null, bindings, false); - - Set bSet = Sets.newHashSet(); - while (sol.hasNext()) { - //TODO this is not optimal - should check if bindings variables intersect minSp variables - //creating the following QueryBindingSet is only necessary if no intersection occurs - QueryBindingSet bs = new QueryBindingSet(); - bs.addAll(sol.next()); - bs.addAll(bindings); - bSet.add(bs); - } - - List spList = starQuery.getNodes(); - spList.remove(minSp); - - StarQuery sq = new StarQuery(spList); - conn.close(); - - return (new EntityTupleSet(sq, conf, true)).evaluate(bSet); - - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } else { - 
this.evalOptUsed = true; - return this.evaluate(Collections.singleton(bindings)); - } - - } - - - private int numberOfSpVars(StatementPattern sp) { - List varList = sp.getVarList(); - int varCount = 0; - - for(int i = 0; i < 3; i++) { - if(!varList.get(i).isConstant()) { - varCount++; - } - } - - return varCount; - } - - - @Override - public CloseableIteration evaluate(final Collection bindingset) throws QueryEvaluationException { - - if(bindingset.size() < 2 && !this.evalOptUsed) { - BindingSet bs = new QueryBindingSet(); - if (bindingset.size() == 1) { - bs = bindingset.iterator().next(); - } - return this.evaluate(bs); - } - //TODO possibly refactor if bindingset.size() > 0 to take advantage of optimization in evaluate(BindingSet bindingset) - AccumuloDocIdIndexer adi = null; - try { - adi = new AccumuloDocIdIndexer(conf); - return adi.queryDocIndex(starQuery, bindingset); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } finally { - IOUtils.closeQuietly(adi); - } - } - - - private RdfCloudTripleStoreConnection getRyaSailConnection() throws AccumuloException, - AccumuloSecurityException, SailException { - final RdfCloudTripleStore store = new RdfCloudTripleStore(); - AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); - crdfdao.setConnector(accCon); - AccumuloRdfConfiguration acc = new AccumuloRdfConfiguration(conf); - crdfdao.setConf(acc); - store.setRyaDAO(crdfdao); - store.initialize(); - - return (RdfCloudTripleStoreConnection) store.getConnection(); - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java deleted file mode 100644 index e9d2f85ac..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java +++ /dev/null @@ -1,636 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.accumulo.documentIndex.TextColumn; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; - -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Value; -import org.openrdf.query.BindingSet; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import com.beust.jcommander.internal.Maps; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.common.primitives.Bytes; - -public class StarQuery { - - private List nodes; - private TextColumn[] nodeColumnCond; - private String commonVarName; - private Var commonVar; - private Var context; - private String contextURI =""; - private Map varPos = Maps.newHashMap(); - private boolean isCommonVarURI = false; - - - public StarQuery(List nodes) { - this.nodes = nodes; - if(nodes.size() == 0) { - throw new IllegalArgumentException("Nodes cannot be empty!"); - } - nodeColumnCond = new TextColumn[nodes.size()]; - Var tempContext = (Var) nodes.get(0).getContextVar(); - if(tempContext != null) { - context = (Var)tempContext.clone(); - } else { - context = new Var(); - } - try { - this.init(); - } catch (RyaTypeResolverException e) { - e.printStackTrace(); - } - } - - - public StarQuery(Set nodes) { - this(Lists.newArrayList(nodes)); - } - - public int size() { - return nodes.size(); - } - - public StarQuery(StarQuery other) { - this(other.nodes); - } - - - public List getNodes() { - return nodes; - } - - - public TextColumn[] getColumnCond() { - return nodeColumnCond; - } - - - public boolean isCommonVarURI() { - return isCommonVarURI; - } - - public String getCommonVarName() { - return commonVarName; - } - - public Var getCommonVar() { - return commonVar; - } - - public boolean commonVarHasValue() { - return commonVar.getValue() != null; - } - - public boolean commonVarConstant() { - return commonVar.isConstant(); - } - - public String getCommonVarValue() { - if(commonVarHasValue()) { - return commonVar.getValue().stringValue(); - } else { - return null; - } - } - - - public Set getUnCommonVars() { - return varPos.keySet(); - } - - - public Map getVarPos() { - return varPos; - } - - public boolean hasContext() { - return context.getValue() != null; - } - - public String getContextURI() { - return contextURI; - } - - - - - public Set getBindingNames() { - - Set bindingNames = Sets.newHashSet(); - - for(StatementPattern sp: nodes) { - - if(bindingNames.size() == 0) { - bindingNames = sp.getBindingNames(); - } else { - bindingNames = Sets.union(bindingNames, sp.getBindingNames()); - } - - } - - return bindingNames; - - } - - - - - public Set getAssuredBindingNames() { - - Set bindingNames = Sets.newHashSet(); - - for(StatementPattern sp: nodes) { - - if(bindingNames.size() == 0) { - bindingNames = sp.getAssuredBindingNames(); - } else { - bindingNames = Sets.union(bindingNames, sp.getAssuredBindingNames()); - } - - } - - return bindingNames; - - } - - - - - - - - public CardinalityStatementPattern getMinCardSp(AccumuloSelectivityEvalDAO ase) { - - StatementPattern minSp = null; - double cardinality = 
Double.MAX_VALUE; - double tempCard = -1; - - for (StatementPattern sp : nodes) { - - try { - tempCard = ase.getCardinality(ase.getConf(), sp); - - if (tempCard < cardinality) { - cardinality = tempCard; - minSp = sp; - } - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - - - } - - return new CardinalityStatementPattern(minSp, cardinality) ; - } - - - - public class CardinalityStatementPattern { - - private StatementPattern sp; - private double cardinality; - - public CardinalityStatementPattern(StatementPattern sp, double cardinality) { - this.sp = sp; - this.cardinality = cardinality; - } - - public StatementPattern getSp() { - return sp; - } - - public double getCardinality() { - return cardinality; - } - - } - - - public double getCardinality( AccumuloSelectivityEvalDAO ase) { - - double cardinality = Double.MAX_VALUE; - double tempCard = -1; - - ase.setDenormalized(true); - - try { - - for (int i = 0; i < nodes.size(); i++) { - for (int j = i + 1; j < nodes.size(); j++) { - - tempCard = ase.getJoinSelect(ase.getConf(), nodes.get(i), nodes.get(j)); - - if (tempCard < cardinality) { - cardinality = tempCard; - } - - } - } - - } catch (Exception e) { - e.printStackTrace(); - } - - ase.setDenormalized(false); - - return cardinality/(nodes.size() + 1); - - } - - - - public static Set getCommonVars(StarQuery query, BindingSet bs) { - - Set starQueryVarNames = Sets.newHashSet(); - - if(bs == null || bs.size() == 0) { - return Sets.newHashSet(); - } - - Set bindingNames = bs.getBindingNames(); - starQueryVarNames.addAll(query.getUnCommonVars()); - if(!query.commonVarConstant()) { - starQueryVarNames.add(query.getCommonVarName()); - } - - return Sets.intersection(bindingNames, starQueryVarNames); - - - } - - - - - - - public static StarQuery getConstrainedStarQuery(StarQuery query, BindingSet bs) { - - if(bs.size() == 0) { - return query; - } - - Set bindingNames = bs.getBindingNames(); - Set unCommonVarNames = query.getUnCommonVars(); - Set intersectVar = Sets.intersection(bindingNames, unCommonVarNames); - - - if (!query.commonVarConstant()) { - - Value v = bs.getValue(query.getCommonVarName()); - - if (v != null) { - query.commonVar.setValue(v); - } - } - - for(String s: intersectVar) { - try { - query.nodeColumnCond[query.varPos.get(s)] = query.setValue(query.nodeColumnCond[query.varPos.get(s)], bs.getValue(s)); - } catch (RyaTypeResolverException e) { - e.printStackTrace(); - } - } - - return query; - } - - - private TextColumn setValue(TextColumn tc, Value v) throws RyaTypeResolverException { - - String cq = tc.getColumnQualifier().toString(); - String[] cqArray = cq.split("\u0000"); - - if (cqArray[0].equals("subject")) { - // RyaURI subjURI = (RyaURI) RdfToRyaConversions.convertValue(v); - tc.setColumnQualifier(new Text("subject" + "\u0000" + v.stringValue())); - tc.setIsPrefix(false); - } else if (cqArray[0].equals("object")) { - RyaType objType = RdfToRyaConversions.convertValue(v); - byte[][] b1 = RyaContext.getInstance().serializeType(objType); - byte[] b2 = Bytes.concat("object".getBytes(), - "\u0000".getBytes(), b1[0], b1[1]); - tc.setColumnQualifier(new Text(b2)); - tc.setIsPrefix(false); - } else { - throw new IllegalStateException("Invalid direction!"); - } - - return tc; - - } - - - - //assumes nodes forms valid star query with only one common variable - //assumes nodes and commonVar has been set - private TextColumn nodeToTextColumn(StatementPattern node, int i) throws RyaTypeResolverException { - - RyaContext rc = RyaContext.getInstance(); - - Var subjVar = 
node.getSubjectVar(); - Var predVar = node.getPredicateVar(); - Var objVar = node.getObjectVar(); - - RyaURI predURI = (RyaURI) RdfToRyaConversions.convertValue(node.getPredicateVar().getValue()); - - - //assumes StatementPattern contains at least on variable - if (subjVar.isConstant()) { - if (commonVarConstant()) { - varPos.put(objVar.getName(), i); - return new TextColumn(new Text(predURI.getData()), new Text("object")); - } else { - return new TextColumn(new Text(predURI.getData()), new Text("subject" + "\u0000" - + subjVar.getValue().stringValue())); - } - - } else if (objVar.isConstant()) { - - if (commonVarConstant()) { - varPos.put(subjVar.getName(), i); - return new TextColumn(new Text(predURI.getData()), new Text("subject")); - } else { - - isCommonVarURI = true; - RyaType objType = RdfToRyaConversions.convertValue(objVar.getValue()); - byte[][] b1 = rc.serializeType(objType); - - byte[] b2 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b1[0], b1[1]); - return new TextColumn(new Text(predURI.getData()), new Text(b2)); - } - - } else { - if (subjVar.getName().equals(commonVarName)) { - - isCommonVarURI = true; - varPos.put(objVar.getName(), i); - - TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("object")); - tc.setIsPrefix(true); - return tc; - - } else { - - varPos.put(subjVar.getName(), i); - - TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("subject")); - tc.setIsPrefix(true); - return tc; - - } - - - } - - - } - - - - - //called in constructor after nodes set - //assumes nodes and nodeColumnCond are same size - private void init() throws RyaTypeResolverException { - - - commonVar = this.getCommonVar(nodes); - if(!commonVar.isConstant()) { - commonVarName = commonVar.getName(); - } else { - commonVarName = commonVar.getName().substring(7); - } - - if(hasContext()) { - RyaURI ctxtURI = (RyaURI) RdfToRyaConversions.convertValue(context.getValue()); - contextURI = ctxtURI.getData(); - } - - for(int i = 0; i < nodes.size(); i++){ - nodeColumnCond[i] = nodeToTextColumn(nodes.get(i), i); - } - - } - - - - - - - - - // called after nodes set - // assumes nodes forms valid query with single, common variable - private Var getCommonVar(List nodes) { - - Set vars = null; - List tempVar; - Set tempSet; - - int i = 0; - for (StatementPattern sp : nodes) { - - if (vars == null) { - vars = Sets.newHashSet(); - vars.add(sp.getSubjectVar()); - vars.add(sp.getObjectVar()); - } else { - tempSet = Sets.newHashSet(); - tempSet.add(sp.getSubjectVar()); - tempSet.add(sp.getObjectVar()); - vars = Sets.intersection(vars, tempSet); - } - - } - - if (vars.size() == 1) { - return vars.iterator().next(); - } else if (vars.size() > 1) { - Var first = null; - - i = 0; - - for (Var v : vars) { - i++; - - if (i == 1) { - first = v; - } else { - if (v.isConstant()) { - return v; - } - } - } - - return first; - - } else { - throw new IllegalStateException("No common Var!"); - } - - } - - - //assumes bindings is not of size 0 - private static boolean isBindingsetValid(Set bindings) { - - int varCount = 0; - - if (bindings.size() == 1) { - return true; - } else { - - - for (String s : bindings) { - if (!s.startsWith("-const-")) { - varCount++; - } - if (varCount > 1) { - return false; - } - } - - return true; - - } - - } - - - - - - public static boolean isValidStarQuery(Collection nodes) { - - Set bindings = null; - boolean contextSet = false; - Var context = null; - - if(nodes.size() < 2) { - return false; - } - - for(StatementPattern sp: nodes) { - - Var 
tempContext = sp.getContextVar(); - Var predVar = sp.getPredicateVar(); - - //does not support variable context - if(tempContext != null && !tempContext.isConstant()) { - return false; - } - if(!contextSet) { - context = tempContext; - contextSet = true; - } else { - - if(context == null && tempContext != null) { - return false; - } else if (context != null && !context.equals(tempContext)) { - return false; - } - } - - if(!predVar.isConstant()) { - return false; - } - - if(bindings == null ) { - bindings = sp.getBindingNames(); - if(bindings.size() == 0) { - return false; - } - } else { - bindings = Sets.intersection(bindings, sp.getBindingNames()); - if(bindings.size() == 0) { - return false; - } - } - - } - - - return isBindingsetValid(bindings); - } - - - - - -// private static Set getSpVariables(StatementPattern sp) { -// -// Set variables = Sets.newHashSet(); -// List varList = sp.getVarList(); -// -// for(Var v: varList) { -// if(!v.isConstant()) { -// variables.add(v.getName()); -// } -// } -// -// return variables; -// -// } -// - - - - - - public String toString() { - - String s = "Term conditions: " + "\n"; - - for(int i = 0; i < this.nodeColumnCond.length; i++) { - s = s + nodeColumnCond[i].toString() + "\n"; - } - - s = s + "Common Var: " + this.commonVar.toString() + "\n"; - s = s + "Context: " + this.contextURI; - - return s; - - } - - - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java deleted file mode 100644 index f52956909..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java +++ /dev/null @@ -1,611 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator; -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.nio.charset.CharacterCodingException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map.Entry; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.indexing.FreeTextIndexer; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.Md5Hash; -import mvm.rya.indexing.accumulo.StatementSerializer; -import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator; -import mvm.rya.indexing.accumulo.freetext.query.ASTExpression; -import mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils; -import mvm.rya.indexing.accumulo.freetext.query.ASTSimpleNode; -import mvm.rya.indexing.accumulo.freetext.query.ASTTerm; -import mvm.rya.indexing.accumulo.freetext.query.ParseException; -import mvm.rya.indexing.accumulo.freetext.query.QueryParser; -import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants; -import mvm.rya.indexing.accumulo.freetext.query.SimpleNode; -import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.client.MultiTableBatchWriter; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.file.keyfunctor.ColumnFamilyFunctor; -import org.apache.accumulo.core.iterators.user.IntersectingIterator; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; -import org.openrdf.model.Literal; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import com.google.common.base.Charsets; - -/** - * The {@link AccumuloFreeTextIndexer} stores and queries "free text" data from statements into tables in Accumulo. Specifically, this class - * stores data into two different Accumulo Tables. This is the document table (default name: triplestore_text) and the terms - * table (default name: triplestore_terms). - *

- * The document table stores the document (i.e. a triple statement), document properties, and the terms within the document. This is the
- * main table used for processing a text search by using document partitioned indexing. See {@link IntersectingIterator}.
- *

- * For each document, the document table will store the following information:
- *

- * - *

- * Row (partition) | Column Family  | Column Qualifier | Value 
- * ================+================+==================+==========
- * shardID         | d\x00          | documentHash     | Document 
- * shardID         | s\x00Subject   | documentHash     | (empty) 
- * shardID         | p\x00Predicate | documentHash     | (empty) 
- * shardID         | o\x00Object    | documentHash     | (empty) 
- * shardID         | c\x00Context   | documentHash     | (empty) 
- * shardID         | t\x00token     | documentHash     | (empty)
- * 
- *

- * Note: documentHash is a Base64-encoded MD5 hash of the Document's Content
- *

- * The terms table is used for expanding wildcard search terms. For each token in the document table, the table will store the following
- * information:
- *

- * Row (partition)   | CF/CQ/Value 
- * ==================+=============
- * l\x00token        | (empty) 
- * r\x00Reversetoken | (empty)
- * 
- *

- * There are two prefixes in the table, "token list" (keys with an "l" prefix) and "reverse token list" (keys with an "r" prefix). This table
- * uses the "token list" to expand foo* into terms like food, foot, and football. This table uses the "reverse token list" to expand *ar
- * into car, bar, and far.
- *

- * Example: Given these three statements as inputs: - * - *

- *      <uri:paul> rdfs:label "paul smith"@en
- *      <uri:steve> rdfs:label "steven anthony miller"@en
- *      <uri:steve> rdfs:label "steve miller"@en
- * 
- *

- * Here's what the tables would look like: (Note: the hashes aren't real, the rows are not sorted, and the partition ids will vary.) - *

- * Triplestore_text - * - *

- * Row (partition) | Column Family                   | Column Qualifier | Value 
- * ================+=================================+==================+==========
- * 000000          | d\x00                           | 08b3d233a        | uri:graph1\x00uri:paul\x00rdfs:label\x00"paul smith"@en
- * 000000          | s\x00uri:paul                   | 08b3d233a        | (empty)
- * 000000          | p\x00rdfs:label                 | 08b3d233a        | (empty)
- * 000000          | o\x00"paul smith"@en            | 08b3d233a        | (empty)
- * 000000          | c\x00uri:graph1                 | 08b3d233a        | (empty)
- * 000000          | t\x00paul                       | 08b3d233a        | (empty)
- * 000000          | t\x00smith                      | 08b3d233a        | (empty)
- * 
- * 000000          | d\x00                           | 3a575534b        | uri:graph1\x00uri:steve\x00rdfs:label\x00"steven anthony miller"@en
- * 000000          | s\x00uri:steve                  | 3a575534b        | (empty)
- * 000000          | p\x00rdfs:label                 | 3a575534b        | (empty)
- * 000000          | o\x00"steven anthony miller"@en | 3a575534b        | (empty)
- * 000000          | c\x00uri:graph1                 | 3a575534b        | (empty)
- * 000000          | t\x00steven                     | 3a575534b        | (empty)
- * 000000          | t\x00anthony                    | 3a575534b        | (empty)
- * 000000          | t\x00miller                     | 3a575534b        | (empty)
- * 
- * 000001          | d\x00                           | 7bf670d06        | uri:graph1\x00uri:steve\x00rdfs:label\x00"steve miller"@en
- * 000001          | s\x00uri:steve                  | 7bf670d06        | (empty)
- * 000001          | p\x00rdfs:label                 | 7bf670d06        | (empty)
- * 000001          | o\x00"steve miller"@en          | 7bf670d06        | (empty)
- * 000001          | c\x00uri:graph1                 | 7bf670d06        | (empty)
- * 000001          | t\x00steve                      | 7bf670d06        | (empty)
- * 000001          | t\x00miller                     | 7bf670d06        | (empty)
- * 
- *

- * triplestore_terms - *

- * - *

- * Row (partition)   | CF/CQ/Value 
- * ==================+=============
- * l\x00paul         | (empty)
- * l\x00smith        | (empty)
- * l\x00steven       | (empty)
- * l\x00anthony      | (empty)
- * l\x00miller       | (empty)
- * l\x00steve        | (empty)
- * r\x00luap         | (empty)
- * r\x00htims        | (empty)
- * r\x00nevets       | (empty)
- * r\x00ynohtna      | (empty)
- * r\x00rellim       | (empty)
- * r\x00evets        | (empty)
- * 
- * 
- */
-public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements FreeTextIndexer  {
-    private static final Logger logger = Logger.getLogger(AccumuloFreeTextIndexer.class);
-
-    private static final byte[] EMPTY_BYTES = new byte[] {};
-    private static final Text EMPTY_TEXT = new Text(EMPTY_BYTES);
-    private static final Value EMPTY_VALUE = new Value(EMPTY_BYTES);
-
-    private Tokenizer tokenizer;
-
-    private BatchWriter docTableBw;
-    private BatchWriter termTableBw;
-    private MultiTableBatchWriter mtbw;
-
-    private int queryTermLimit;
-
-    private int docTableNumPartitions;
-
-    private Set<URI> validPredicates;
-
-    private Configuration conf;
-    
-    private boolean isInit = false;
-
-    
-    private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
-            TableExistsException {
-        String doctable = ConfigUtils.getFreeTextDocTablename(conf);
-        String termtable = ConfigUtils.getFreeTextTermTablename(conf);
-
-        docTableNumPartitions = ConfigUtils.getFreeTextDocNumPartitions(conf);
-        int termTableNumPartitions = ConfigUtils.getFreeTextTermNumPartitions(conf);
-
-        TableOperations tableOps = ConfigUtils.getConnector(conf).tableOperations();
-
-        // Create term table partitions
-        boolean createdTermTable = ConfigUtils.createTableIfNotExists(conf, termtable);
-        if (createdTermTable && !ConfigUtils.useMockInstance(conf) && termTableNumPartitions > 0) {
-            TreeSet<Text> splits = new TreeSet<Text>();
-
-            // split on the "Term List" and "Reverse Term list" boundary
-            splits.add(new Text(ColumnPrefixes.getRevTermListColFam("")));
-
-            // Symmetrically split the "Term List" and "Reverse Term list"
-            int numSubpartitions = ((termTableNumPartitions - 1) / 2);
-            if (numSubpartitions > 0) {
-                int step = (26 / numSubpartitions);
-                for (int i = 0; i < numSubpartitions; i++) {
-                    String nextChar = String.valueOf((char) ('a' + (step * i)));
-                    splits.add(new Text(ColumnPrefixes.getTermListColFam(nextChar)));
-                    splits.add(new Text(ColumnPrefixes.getRevTermListColFam(nextChar)));
-                }
-            }
-            tableOps.addSplits(termtable, splits);
-        }
-
-        // Create document (text) table partitions
-        boolean createdDocTable = ConfigUtils.createTableIfNotExists(conf, doctable);
-        if (createdDocTable && !ConfigUtils.useMockInstance(conf)) {
-            TreeSet<Text> splits = new TreeSet<Text>();
-            for (int i = 0; i < docTableNumPartitions; i++) {
-                splits.add(genPartition(i, docTableNumPartitions));
-            }
-            tableOps.addSplits(doctable, splits);
-
-            // Add a tablet level Bloom filter for the Column Family.
-            // This will allow us to quickly determine if a term is contained in a tablet.
-            tableOps.setProperty(doctable, "table.bloom.key.functor", ColumnFamilyFunctor.class.getCanonicalName());
-            tableOps.setProperty(doctable, "table.bloom.enabled", Boolean.TRUE.toString());
-        }
-
-        mtbw = ConfigUtils.createMultitableBatchWriter(conf);
-
-        docTableBw = mtbw.getBatchWriter(doctable);
-        termTableBw = mtbw.getBatchWriter(termtable);
-
-        tokenizer = ConfigUtils.getFreeTextTokenizer(conf);
-        validPredicates = ConfigUtils.getFreeTextPredicates(conf);
-
-        queryTermLimit = ConfigUtils.getFreeTextTermLimit(conf);
-    }
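
To make the symmetric split computation above concrete, here is a minimal standalone sketch. It assumes only that the term list and reverse term list column families are prefixed with "l\0" and "r\0" (as ColumnPrefixes, removed later in this patch, defines them); the partition count is hypothetical. With termTableNumPartitions = 7, numSubpartitions = (7 - 1) / 2 = 3 and step = 26 / 3 = 8, so splits land at 'a', 'i', and 'q' under both prefixes, plus the boundary split between the two lists:

    import java.util.TreeSet;
    import org.apache.hadoop.io.Text;

    public class TermTableSplitSketch {
        public static void main(String[] args) {
            int termTableNumPartitions = 7;                           // hypothetical value
            TreeSet<Text> splits = new TreeSet<Text>();
            splits.add(new Text("r\0"));                              // term list / reverse term list boundary
            int numSubpartitions = (termTableNumPartitions - 1) / 2;  // 3
            int step = 26 / numSubpartitions;                         // 8
            for (int i = 0; i < numSubpartitions; i++) {
                String nextChar = String.valueOf((char) ('a' + (step * i)));  // 'a', 'i', 'q'
                splits.add(new Text("l\0" + nextChar));               // term list split
                splits.add(new Text("r\0" + nextChar));               // reverse term list split
            }
            // splits now holds: l\0a, l\0i, l\0q, r\0, r\0a, r\0i, r\0q
        }
    }
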
-    
-    
-    // initialization occurs in setConf because the index is created using reflection
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-        if (!isInit) {
-            try {
-                init();
-                isInit = true;
-            } catch (AccumuloException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (AccumuloSecurityException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableNotFoundException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableExistsException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            }
-        }
-    }
-    
-    @Override
-    public Configuration getConf() {
-        return this.conf;
-    }
-    
-
-    private void storeStatement(Statement statement) throws IOException {
-        // if the predicate list is empty, accept all predicates.
-        // Otherwise, make sure the predicate is on the "valid" list
-        boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
-
-        if (isValidPredicate && (statement.getObject() instanceof Literal)) {
-
-            // Get the tokens
-            String text = statement.getObject().stringValue().toLowerCase();
-            SortedSet<String> tokens = tokenizer.tokenize(text);
-
-            if (!tokens.isEmpty()) {
-                // Get Document Data
-                String docContent = StatementSerializer.writeStatement(statement);
-
-                String docId = Md5Hash.md5Base64(docContent);
-
-                // Setup partition
-                Text partition = genPartition(docContent.hashCode(), docTableNumPartitions);
-
-                Mutation docTableMut = new Mutation(partition);
-                List<Mutation> termTableMutations = new ArrayList<Mutation>();
-
-                Text docIdText = new Text(docId);
-
-                // Store the Document Data
-                docTableMut.put(ColumnPrefixes.DOCS_CF_PREFIX, docIdText, new Value(docContent.getBytes(Charsets.UTF_8)));
-
-                // index the statement parts
-                docTableMut.put(ColumnPrefixes.getSubjColFam(statement), docIdText, EMPTY_VALUE);
-                docTableMut.put(ColumnPrefixes.getPredColFam(statement), docIdText, EMPTY_VALUE);
-                docTableMut.put(ColumnPrefixes.getObjColFam(statement), docIdText, EMPTY_VALUE);
-                docTableMut.put(ColumnPrefixes.getContextColFam(statement), docIdText, EMPTY_VALUE);
-
-                // index the statement terms
-                for (String token : tokens) {
-                    // tie the token to the document
-                    docTableMut.put(ColumnPrefixes.getTermColFam(token), docIdText, EMPTY_VALUE);
-
-                    // store the term in the term table (useful for wildcard searches)
-                    termTableMutations.add(createEmptyPutMutation(ColumnPrefixes.getTermListColFam(token)));
-                    termTableMutations.add(createEmptyPutMutation(ColumnPrefixes.getRevTermListColFam(token)));
-                }
-
-                // write the mutations
-                try {
-                    docTableBw.addMutation(docTableMut);
-                    termTableBw.addMutations(termTableMutations);
-                } catch (MutationsRejectedException e) {
-                    logger.error("error adding mutation", e);
-                    throw new IOException(e);
-                }
-
-            }
-
-        }
-    }
-
-    @Override
-    public void storeStatement(RyaStatement statement) throws IOException {
-        storeStatement(RyaToRdfConversions.convertStatement(statement));
-    }
-
-    private static Mutation createEmptyPutMutation(Text row) {
-        Mutation m = new Mutation(row);
-        m.put(EMPTY_TEXT, EMPTY_TEXT, EMPTY_VALUE);
-        return m;
-    }
-
-    private static Text genPartition(int partition, int numParitions) {
-        int length = Integer.toString(numParitions).length();
-        return new Text(String.format("%0" + length + "d", Math.abs(partition % numParitions)));
-    }
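
As a worked example of the identity and placement logic in storeStatement and genPartition above (the hash value is hypothetical, and statement is assumed to be in scope):

    // Serialize the statement, hash the content for the document id, then
    // use the content's hashCode() to pick a zero-padded partition string.
    String docContent = StatementSerializer.writeStatement(statement);
    String docId = Md5Hash.md5Base64(docContent);               // Base64-encoded MD5, e.g. "08b3d233a" (hypothetical)
    Text partition = genPartition(docContent.hashCode(), 100);
    // Integer.toString(100).length() == 3, so partitions range over "000" .. "099"
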
-
-    @Override
-    public Set<URI> getIndexablePredicates() {
-        return validPredicates;
-    }
-
-    /** {@inheritDoc} */
-    @Override
-    public void flush() throws IOException {
-        try {
-            mtbw.flush();
-        } catch (MutationsRejectedException e) {
-            logger.error("error flushing the batch writer", e);
-            throw new IOException(e);
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override
-    public void close() throws IOException {
-        try {
-            mtbw.close();
-        } catch (MutationsRejectedException e) {
-            logger.error("error closing the batch writer", e);
-            throw new IOException(e);
-        }
-    }
-
-    private Set<String> unrollWildcard(String string, boolean reverse) throws IOException {
-        Scanner termTableScan = getScanner(ConfigUtils.getFreeTextTermTablename(conf));
-
-        Set<String> unrolledTerms = new HashSet<String>();
-
-        Text queryTerm;
-        if (reverse) {
-            String t = StringUtils.removeStart(string, "*").toLowerCase();
-            queryTerm = ColumnPrefixes.getRevTermListColFam(t);
-        } else {
-            String t = StringUtils.removeEnd(string, "*").toLowerCase();
-            queryTerm = ColumnPrefixes.getTermListColFam(t);
-        }
-
-        // perform query and read results
-        termTableScan.setRange(Range.prefix(queryTerm));
-
-        for (Entry<Key, Value> e : termTableScan) {
-            String term = ColumnPrefixes.removePrefix(e.getKey().getRow()).toString();
-            if (reverse) {
-                unrolledTerms.add(StringUtils.reverse(term));
-            } else {
-                unrolledTerms.add(term);
-            }
-        }
-
-        if (unrolledTerms.isEmpty()) {
-            // put in a placeholder term that will never be in the index.
-            unrolledTerms.add("\1\1\1");
-        }
-
-        return unrolledTerms;
-    }
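
Concretely, reusing the expansion examples from the class javadoc (the returned sets depend on what is actually in the term table, so the values shown are illustrative):

    // Trailing wildcard: "foo*" is scanned as a prefix under the term list
    // ("l\0foo..."), and might unroll to {"food", "foot", "football"}.
    Set<String> trailing = unrollWildcard("foo*", false);

    // Leading wildcard: "*ar" is reversed to "ra", scanned under the reverse
    // term list ("r\0ra..."), and each hit is reversed back, e.g. {"car", "bar", "far"}.
    Set<String> leading = unrollWildcard("*ar", true);
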
-
-    private void unrollWildcards(SimpleNode node) throws IOException {
-        if (node instanceof ASTExpression || node instanceof ASTSimpleNode) {
-            for (SimpleNode n : getNodeIterator(node)) {
-                unrollWildcards(n);
-            }
-        } else if (node instanceof ASTTerm) {
-            ASTTerm term = (ASTTerm) node;
-            boolean isWildTerm = term.getType().equals(ASTTerm.WILDTERM);
-            boolean isPreWildTerm = term.getType().equals(ASTTerm.PREFIXTERM);
-            if (isWildTerm || isPreWildTerm) {
-                Set<String> unrolledTerms = unrollWildcard(term.getTerm(), isPreWildTerm);
-
-                // create a new expression
-                ASTExpression newExpression = new ASTExpression(QueryParserTreeConstants.JJTEXPRESSION);
-                newExpression.setType(ASTExpression.OR);
-                newExpression.setNotFlag(term.isNotFlag());
-
-                for (String unrolledTerm : unrolledTerms) {
-                    ASTTerm t = new ASTTerm(QueryParserTreeConstants.JJTTERM);
-                    t.setNotFlag(false);
-                    t.setTerm(unrolledTerm);
-                    t.setType(ASTTerm.TERM);
-                    ASTNodeUtils.pushChild(newExpression, t);
-                }
-
-                // replace "term" node with "expression" node in "term" node parent
-                SimpleNode parent = (SimpleNode) term.jjtGetParent();
-                int index = ASTNodeUtils.getChildIndex(parent, term);
-
-                Validate.isTrue(index >= 0, "child not found in parent");
-
-                parent.jjtAddChild(newExpression, index);
-            }
-
-        } else {
-            throw new IllegalArgumentException("Node is of unknown type: " + node.getClass().getName());
-        }
-    }
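
The rewrite above replaces each wildcard leaf with an OR expression in place, so a query only ever reaches the server with concrete terms. An illustrative before/after, with terms as in the javadoc examples:

    SimpleNode root = parseQuery("foo* AND bar");
    unrollWildcards(root);
    // The WILDTERM leaf "foo*" is now an OR expression, so serializing the
    // tree yields something like: (food OR foot OR football) AND bar
    String unrolled = ASTNodeUtils.serializeExpression(root);
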
-
-    private Scanner getScanner(String tablename) throws IOException {
-        try {
-            return ConfigUtils.createScanner(tablename, conf);
-        } catch (AccumuloException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (AccumuloSecurityException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (TableNotFoundException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        }
-    }
-
-    /** {@inheritDoc} */
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryText(String query, StatementContraints contraints)
-            throws IOException {
-        Scanner docTableScan = getScanner(ConfigUtils.getFreeTextDocTablename(conf));
-
-        // test the query to see if it parses correctly.
-        SimpleNode root = parseQuery(query);
-
-        // unroll any wildcard nodes before it goes to the server
-        unrollWildcards(root);
-
-        String unrolledQuery = ASTNodeUtils.serializeExpression(root);
-
-        // Add S P O C constraints to query
-        StringBuilder constrainedQuery = new StringBuilder("(" + unrolledQuery + ")");
-
-        if (contraints.hasSubject()) {
-            constrainedQuery.append(" AND ");
-            constrainedQuery.append(ColumnPrefixes.getSubjColFam(contraints.getSubject().toString()).toString());
-        }
-        if (contraints.hasContext()) {
-            constrainedQuery.append(" AND ");
-            constrainedQuery.append(ColumnPrefixes.getContextColFam(contraints.getContext().toString()).toString());
-        }
-        if (contraints.hasPredicates()) {
-            constrainedQuery.append(" AND (");
-            List<String> predicates = new ArrayList<String>();
-            for (URI u : contraints.getPredicates()) {
-                predicates.add(ColumnPrefixes.getPredColFam(u.stringValue()).toString());
-            }
-            constrainedQuery.append(StringUtils.join(predicates, " OR "));
-            constrainedQuery.append(")");
-        }
-
-        // Verify that the query is a reasonable size
-        root = parseQuery(constrainedQuery.toString());
-        int termCount = ASTNodeUtils.termCount(root);
-
-        if (termCount > queryTermLimit) {
-            throw new IOException("Query contains too many terms.  Term limit: " + queryTermLimit + ".  Term Count: " + termCount);
-        }
-
-        // perform query
-        docTableScan.clearScanIterators();
-        docTableScan.clearColumns();
-
-        int iteratorPriority = 20;
-        String iteratorName = "booleanTree";
-        IteratorSetting ii = new IteratorSetting(iteratorPriority, iteratorName, BooleanTreeIterator.class);
-        BooleanTreeIterator.setQuery(ii, constrainedQuery.toString());
-        docTableScan.addScanIterator(ii);
-        docTableScan.setRange(new Range());
-
-        return getIteratorWrapper(docTableScan);
-    }
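
A minimal usage sketch for queryText. The fluent setter on StatementContraints and the predicate URI are illustrative assumptions, and indexer is assumed to be an initialized AccumuloFreeTextIndexer:

    StatementContraints contraints = new StatementContraints()
            .setPredicates(Collections.<URI>singleton(new URIImpl("urn:example#label")));  // hypothetical predicate
    CloseableIteration<Statement, QueryEvaluationException> hits =
            indexer.queryText("paul AND smith", contraints);
    try {
        while (hits.hasNext()) {
            System.out.println(hits.next());
        }
    } finally {
        hits.close();
    }
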
-
-    private static CloseableIteration<Statement, QueryEvaluationException> getIteratorWrapper(final Scanner s) {
-
-        final Iterator<Entry<Key, Value>> i = s.iterator();
-
-        return new CloseableIteration<Statement, QueryEvaluationException>() {
-            @Override
-            public boolean hasNext() {
-                return i.hasNext();
-            }
-
-            @Override
-            public Statement next() throws QueryEvaluationException {
-                Entry<Key, Value> entry = i.next();
-                Value v = entry.getValue();
-                try {
-                    String dataString = Text.decode(v.get(), 0, v.getSize());
-                    Statement s = StatementSerializer.readStatement(dataString);
-                    return s;
-                } catch (CharacterCodingException e) {
-                    logger.error("Error decoding value", e);
-                    throw new QueryEvaluationException(e);
-                } catch (IOException e) {
-                    logger.error("Error deserializing statement", e);
-                    throw new QueryEvaluationException(e);
-                }
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException("Remove not implemented");
-            }
-
-            @Override
-            public void close() throws QueryEvaluationException {
-                s.close();
-            }
-        };
-    }
-
-    /**
-     * Simple adapter that parses the query using {@link QueryParser}. Note: any checked exceptions thrown by {@link QueryParser} are
-     * re-thrown as {@link IOException}s.
-     * 
-     * @param query the free text query to parse
-     * @return the root {@link SimpleNode} of the parsed query tree
-     * @throws IOException if the query cannot be parsed
-     */
-    private static SimpleNode parseQuery(String query) throws IOException {
-        SimpleNode root = null;
-        try {
-            root = QueryParser.parse(query);
-        } catch (ParseException e) {
-            logger.error("Parser Exception on Client Side. Query: " + query, e);
-            throw new IOException(e);
-        } catch (TokenMgrError e) {
-            logger.error("Token Manager Exception on Client Side. Query: " + query, e);
-            throw new IOException(e);
-        }
-        return root;
-    }
-    
-   
-    @Override
-    public String getTableName() {
-       return ConfigUtils.getFreeTextDocTablename(conf);
-    }
-
-    
-}
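
Stepping back, the indexer deleted above was driven through a simple lifecycle; the sketch below is not verbatim usage from the codebase, and assumes conf, ryaStatement, and contraints are set up elsewhere:

    AccumuloFreeTextIndexer indexer = new AccumuloFreeTextIndexer();
    indexer.setConf(conf);                 // triggers init(): tables, splits, and batch writers

    indexer.storeStatement(ryaStatement); // tokenizes the literal object, writes doc + term mutations
    indexer.flush();                      // pushes buffered mutations to Accumulo

    CloseableIteration<Statement, QueryEvaluationException> hits =
            indexer.queryText("steve* AND miller", contraints);
    // ... consume hits, then ...
    indexer.close();                      // closes the underlying MultiTableBatchWriter
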
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
deleted file mode 100644
index 31666c98f..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package mvm.rya.indexing.accumulo.freetext;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.nio.ByteBuffer;
-import java.nio.charset.CharacterCodingException;
-
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.Statement;
-
-/**
- * Row ID: shardId
- * 

- * CF: CF Prefix + Term - */ -public class ColumnPrefixes { - public static final Text DOCS_CF_PREFIX = new Text("d\0"); - public static final Text TERM_CF_PREFIX = new Text("t\0"); - public static final Text TERM_LIST_CF_PREFIX = new Text("l\0"); - public static final Text REVERSE_TERM_LIST_CF_PREFIX = new Text("r\0"); - - public static final Text SUBJECT_CF_PREFIX = new Text("s\0"); - public static final Text PREDICATE_CF_PREFIX = new Text("p\0"); - public static final Text OBJECT_CF_PREFIX = new Text("o\0"); - public static final Text CONTEXT_CF_PREFIX = new Text("c\0"); - - private static Text concat(Text prefix, String str) { - Text temp = new Text(prefix); - - try { - ByteBuffer buffer = Text.encode(str, false); - temp.append(buffer.array(), 0, buffer.limit()); - } catch (CharacterCodingException cce) { - throw new IllegalArgumentException(cce); - } - - return temp; - } - - public static Text getTermColFam(String term) { - return concat(TERM_CF_PREFIX, term); - } - - public static Text getTermListColFam(String term) { - return concat(TERM_LIST_CF_PREFIX, term); - } - - public static Text getRevTermListColFam(String term) { - return concat(REVERSE_TERM_LIST_CF_PREFIX, StringUtils.reverse(term)); - } - - public static Text getDocColFam(String term) { - return concat(DOCS_CF_PREFIX, term); - } - - public static Text getSubjColFam(String term) { - return concat(SUBJECT_CF_PREFIX, term); - } - - public static Text getSubjColFam(Statement statement) { - String subj = StatementSerializer.writeSubject(statement); - return getSubjColFam(subj); - } - - public static Text getPredColFam(String term) { - return concat(PREDICATE_CF_PREFIX, term); - } - - public static Text getPredColFam(Statement statement) { - String pred = StatementSerializer.writePredicate(statement); - return getPredColFam(pred); - } - - public static Text getObjColFam(String term) { - return concat(OBJECT_CF_PREFIX, term); - } - - public static Text getObjColFam(Statement statement) { - String obj = StatementSerializer.writeObject(statement); - return getObjColFam(obj); - } - - public static Text getContextColFam(String term) { - return concat(CONTEXT_CF_PREFIX, term); - } - - public static Text getContextColFam(Statement statement) { - String cont = StatementSerializer.writeContext(statement); - return getContextColFam(cont); - } - - public static Text removePrefix(Text termWithPrefix) { - Text temp = new Text(); - temp.set(termWithPrefix.getBytes(), 2, termWithPrefix.getLength() - 2); - return temp; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java deleted file mode 100644 index 471870bf9..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java +++ /dev/null @@ -1,160 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.util.Set; - -import mvm.rya.indexing.FreeTextIndexer; -import mvm.rya.indexing.IndexingExpr; -import mvm.rya.indexing.IteratorFactory; -import mvm.rya.indexing.SearchFunction; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.hadoop.conf.Configuration; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.QueryModelVisitor; - -import com.google.common.base.Joiner; - - -//Indexing Node for freetext expressions to be inserted into execution plan -//to delegate freetext portion of query to free text index -public class FreeTextTupleSet extends ExternalTupleSet { - - private Configuration conf; - private FreeTextIndexer freeTextIndexer; - private IndexingExpr filterInfo; - - - public FreeTextTupleSet(IndexingExpr filterInfo, FreeTextIndexer freeTextIndexer) { - this.filterInfo = filterInfo; - this.freeTextIndexer = freeTextIndexer; - this.conf = freeTextIndexer.getConf(); - } - - /** - * {@inheritDoc} - */ - @Override - public Set getBindingNames() { - return filterInfo.getBindingNames(); - } - - /** - * {@inheritDoc} - *
- * Note that we need a deep copy for everything that (during optimizations)
- * can be altered via {@link #visitChildren(QueryModelVisitor)}
- */
-    public FreeTextTupleSet clone() {
-        return new FreeTextTupleSet(filterInfo, freeTextIndexer);
-    }
-
-    @Override
-    public double cardinality() {
-        return 0.0; // No idea how to estimate cardinality here.
-    }
-
-    @Override
-    public String getSignature() {
-        return "(FreeTextTuple Projection) " + "variables: " + Joiner.on(", ").join(this.getBindingNames()).replaceAll("\\s+", " ");
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (other == this) {
-            return true;
-        }
-        if (!(other instanceof FreeTextTupleSet)) {
-            return false;
-        }
-
-        FreeTextTupleSet arg = (FreeTextTupleSet) other;
-        return this.filterInfo.equals(arg.filterInfo);
-    }
-
-    @Override
-    public int hashCode() {
-        int result = 17;
-        result = 31 * result + filterInfo.hashCode();
-        return result;
-    }
-
-    /**
-     * Returns an iterator over the result set of the contained {@link IndexingExpr}.
-     *
- * Should be thread-safe (concurrent invocation {@link OfflineIterable} this - * method can be expected with some query evaluators. - */ - @Override - public CloseableIteration evaluate(BindingSet bindings) - throws QueryEvaluationException { - - - URI funcURI = filterInfo.getFunction(); - - SearchFunction searchFunction = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - CloseableIteration statements = freeTextIndexer.queryText( - queryText, contraints); - return statements; - } catch (IOException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "TEXT"; - }; - }; - - if (filterInfo.getArguments().length > 1) { - throw new IllegalArgumentException("Index functions do not support more than two arguments."); - } - - String queryText = filterInfo.getArguments()[0].stringValue(); - - return IteratorFactory.getIterator(filterInfo.getSpConstraint(), bindings, queryText, searchFunction); - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java deleted file mode 100644 index abda04acf..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java +++ /dev/null @@ -1,57 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.io.StringReader; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.util.Version; - -/** - * A {@link Tokenizer} that delegates to Lucene functions - */ -public class LuceneTokenizer implements Tokenizer { - private static final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_36); - - @Override - public SortedSet tokenize(String string) { - SortedSet set = new TreeSet(); - try { - TokenStream stream = analyzer.tokenStream(null, new StringReader(string)); - stream.reset(); - while (stream.incrementToken()) { - set.add(stream.getAttribute(CharTermAttribute.class).toString()); - } - } catch (IOException e) { - // not thrown b/c we're using a string reader... 
-            throw new RuntimeException(e);
-        }
-
-        return set;
-    }
-}
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
deleted file mode 100644
index e98e676cc..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package mvm.rya.indexing.accumulo.freetext;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * A {@link Tokenizer} that splits on whitespace.
- */
-public class SimpleTokenizer implements Tokenizer {
-
-    @Override
-    public SortedSet<String> tokenize(String string) {
-        SortedSet<String> set = new TreeSet<String>();
-        for (String token : string.split("\\s+")) {
-            String t = token.trim().toLowerCase();
-            if (!t.isEmpty()) {
-                set.add(t);
-            }
-        }
-        return set;
-    }
-}
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
deleted file mode 100644
index 24b40cd5e..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package mvm.rya.indexing.accumulo.freetext;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.util.SortedSet;
-
-/**
- * A utility that splits a string into tokens.
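Editor's note: the SimpleTokenizer above lowercases, trims, de-duplicates, and sorts whitespace-delimited tokens. A stand-alone sketch of that behavior (the demo class name and sample sentence are illustrative):

import java.util.SortedSet;
import java.util.TreeSet;

public class SimpleTokenizerDemo {
    // Same logic as SimpleTokenizer.tokenize above.
    static SortedSet<String> tokenize(String string) {
        SortedSet<String> set = new TreeSet<String>();
        for (String token : string.split("\\s+")) {
            String t = token.trim().toLowerCase();
            if (!t.isEmpty()) {
                set.add(t);
            }
        }
        return set;
    }

    public static void main(String[] args) {
        // prints [brown, fox, quick, the]
        System.out.println(tokenize("The quick brown fox THE"));
    }
}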
- */ -public interface Tokenizer { - public SortedSet tokenize(String sting); -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java deleted file mode 100644 index 355fe14cd..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java +++ /dev/null @@ -1,563 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext.iterators; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.data.ArrayByteSequence; -import org.apache.accumulo.core.data.ByteSequence; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.PartialKey; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.iterators.IteratorEnvironment; -import org.apache.accumulo.core.iterators.SortedKeyValueIterator; -import org.apache.accumulo.core.iterators.user.IntersectingIterator; -import org.apache.accumulo.core.util.TextUtil; -import org.apache.commons.codec.binary.Base64; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; - -/** - * Adapted from {@link IntersectingIterator} with very slight modifications. Specifically, the comparator on the TermSource internal class was - * modified to handle exhausted iterators and multiple rows per tablet server. - */ -public class AndingIterator implements SortedKeyValueIterator { - - protected Text nullText = new Text(); - - protected Text getPartition(Key key) { - return key.getRow(); - } - - protected Text getTerm(Key key) { - return key.getColumnFamily(); - } - - protected Text getDocID(Key key) { - return key.getColumnQualifier(); - } - - protected Key buildKey(Text partition, Text term) { - return new Key(partition, (term == null) ? nullText : term); - } - - protected Key buildKey(Text partition, Text term, Text docID) { - return new Key(partition, (term == null) ? 
nullText : term, docID); - } - - protected Key buildFollowingPartitionKey(Key key) { - return key.followingKey(PartialKey.ROW); - } - - protected static final Logger log = Logger.getLogger(AndingIterator.class); - - protected static class TermSource { - public SortedKeyValueIterator iter; - public Text term; - public Collection seekColfams; - public boolean notFlag; - - public TermSource(TermSource other) { - this.iter = other.iter; - this.term = other.term; - this.notFlag = other.notFlag; - this.seekColfams = other.seekColfams; - } - - public TermSource(SortedKeyValueIterator iter, Text term) { - this(iter, term, false); - } - - public TermSource(SortedKeyValueIterator iter, Text term, boolean notFlag) { - this.iter = iter; - this.term = term; - this.notFlag = notFlag; - // The desired column families for this source is the term itself - - // handle the case where the term is null. - if (term == null) { - this.seekColfams = Collections. emptyList(); - } else { - this.seekColfams = Collections. singletonList(new ArrayByteSequence(term.getBytes(), 0, term.getLength())); - } - } - - public String getTermString() { - return (this.term == null) ? new String("Iterator") : this.term.toString(); - } - } - - TermSource[] sources; - int sourcesCount = 0; - - Range overallRange; - - // query-time settings - protected Text currentPartition = null; - protected Text currentDocID = new Text(emptyByteArray); - static final byte[] emptyByteArray = new byte[0]; - - protected Key topKey = null; - protected Value value = new Value(emptyByteArray); - - public AndingIterator() { - } - - @Override - public SortedKeyValueIterator deepCopy(IteratorEnvironment env) { - return new AndingIterator(this, env); - } - - private AndingIterator(AndingIterator other, IteratorEnvironment env) { - if (other.sources != null) { - sourcesCount = other.sourcesCount; - sources = new TermSource[sourcesCount]; - for (int i = 0; i < sourcesCount; i++) { - sources[i] = new TermSource(other.sources[i].iter.deepCopy(env), other.sources[i].term); - } - } - } - - @Override - public Key getTopKey() { - return topKey; - } - - @Override - public Value getTopValue() { - // we don't really care about values - return value; - } - - @Override - public boolean hasTop() { - return currentPartition != null; - } - - // precondition: currentRow is not null - private boolean seekOneSource(int sourceID) throws IOException { - // find the next key in the appropriate column family that is at or beyond the cursor (currentRow, currentCQ) - // advance the cursor if this source goes beyond it - // return whether we advanced the cursor - - // within this loop progress must be made in one of the following forms: - // - currentRow or currentCQ must be increased - // - the given source must advance its iterator - // this loop will end when any of the following criteria are met - // - the iterator for the given source is pointing to the key (currentRow, columnFamilies[sourceID], currentCQ) - // - the given source is out of data and currentRow is set to null - // - the given source has advanced beyond the endRow and currentRow is set to null - boolean advancedCursor = false; - - if (sources[sourceID].notFlag) { - while (true) { - if (sources[sourceID].iter.hasTop() == false) { - // an empty column that you are negating is a valid condition - break; - } - // check if we're past the end key - int endCompare = -1; - // we should compare the row to the end of the range - if (overallRange.getEndKey() != null) { - endCompare = 
overallRange.getEndKey().getRow().compareTo(sources[sourceID].iter.getTopKey().getRow()); - if ((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0) { - // an empty column that you are negating is a valid condition - break; - } - } - int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey())); - // check if this source is already at or beyond currentRow - // if not, then seek to at least the current row - - if (partitionCompare > 0) { - // seek to at least the currentRow - Key seekKey = buildKey(currentPartition, sources[sourceID].term); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // check if this source has gone beyond currentRow - // if so, this is a valid condition for negation - if (partitionCompare < 0) { - break; - } - // we have verified that the current source is positioned in currentRow - // now we must make sure we're in the right columnFamily in the current row - // Note: Iterators are auto-magically set to the correct columnFamily - if (sources[sourceID].term != null) { - int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey())); - // check if this source is already on the right columnFamily - // if not, then seek forwards to the right columnFamily - if (termCompare > 0) { - Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // check if this source is beyond the right columnFamily - // if so, then this is a valid condition for negating - if (termCompare < 0) { - break; - } - } - - // we have verified that we are in currentRow and the correct column family - // make sure we are at or beyond columnQualifier - Text docID = getDocID(sources[sourceID].iter.getTopKey()); - int docIDCompare = currentDocID.compareTo(docID); - // If we are past the target, this is a valid result - if (docIDCompare < 0) { - break; - } - // if this source is not yet at the currentCQ then advance in this source - if (docIDCompare > 0) { - // seek forwards - Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // if we are equal to the target, this is an invalid result. - // Force the entire process to go to the next row. - // We are advancing column 0 because we forced that column to not contain a ! 
- // when we did the init() - if (docIDCompare == 0) { - sources[0].iter.next(); - advancedCursor = true; - break; - } - } - } else { - while (true) { - if (sources[sourceID].iter.hasTop() == false) { - currentPartition = null; - // setting currentRow to null counts as advancing the cursor - return true; - } - // check if we're past the end key - int endCompare = -1; - // we should compare the row to the end of the range - - if (overallRange.getEndKey() != null) { - endCompare = overallRange.getEndKey().getRow().compareTo(sources[sourceID].iter.getTopKey().getRow()); - if ((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0) { - currentPartition = null; - // setting currentRow to null counts as advancing the cursor - return true; - } - } - int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey())); - // check if this source is already at or beyond currentRow - // if not, then seek to at least the current row - if (partitionCompare > 0) { - // seek to at least the currentRow - Key seekKey = buildKey(currentPartition, sources[sourceID].term); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // check if this source has gone beyond currentRow - // if so, advance currentRow - if (partitionCompare < 0) { - currentPartition.set(getPartition(sources[sourceID].iter.getTopKey())); - currentDocID.set(emptyByteArray); - advancedCursor = true; - continue; - } - // we have verified that the current source is positioned in currentRow - // now we must make sure we're in the right columnFamily in the current row - // Note: Iterators are auto-magically set to the correct columnFamily - - if (sources[sourceID].term != null) { - int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey())); - // check if this source is already on the right columnFamily - // if not, then seek forwards to the right columnFamily - if (termCompare > 0) { - Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // check if this source is beyond the right columnFamily - // if so, then seek to the next row - if (termCompare < 0) { - // we're out of entries in the current row, so seek to the next one - // byte[] currentRowBytes = currentRow.getBytes(); - // byte[] nextRow = new byte[currentRowBytes.length + 1]; - // System.arraycopy(currentRowBytes, 0, nextRow, 0, currentRowBytes.length); - // nextRow[currentRowBytes.length] = (byte)0; - // // we should reuse text objects here - // sources[sourceID].seek(new Key(new Text(nextRow),columnFamilies[sourceID])); - if (endCompare == 0) { - // we're done - currentPartition = null; - // setting currentRow to null counts as advancing the cursor - return true; - } - Key seekKey = buildFollowingPartitionKey(sources[sourceID].iter.getTopKey()); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - } - // we have verified that we are in currentRow and the correct column family - // make sure we are at or beyond columnQualifier - Text docID = getDocID(sources[sourceID].iter.getTopKey()); - int docIDCompare = currentDocID.compareTo(docID); - // if this source has advanced beyond the current column qualifier then advance currentCQ and return true - if (docIDCompare < 0) { - currentDocID.set(docID); - 
advancedCursor = true; - break; - } - // if this source is not yet at the currentCQ then seek in this source - if (docIDCompare > 0) { - // seek forwards - Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID); - sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true); - continue; - } - // this source is at the current row, in its column family, and at currentCQ - break; - } - } - return advancedCursor; - } - - @Override - public void next() throws IOException { - if (currentPartition == null) { - return; - } - // precondition: the current row is set up and the sources all have the same column qualifier - // while we don't have a match, seek in the source with the smallest column qualifier - sources[0].iter.next(); - advanceToIntersection(); - } - - protected void advanceToIntersection() throws IOException { - boolean cursorChanged = true; - while (cursorChanged) { - // seek all of the sources to at least the highest seen column qualifier in the current row - cursorChanged = false; - for (int i = 0; i < sourcesCount; i++) { - if (currentPartition == null) { - topKey = null; - return; - } - if (seekOneSource(i)) { - cursorChanged = true; - break; - } - } - } - topKey = buildKey(currentPartition, nullText, currentDocID); - } - - public static String stringTopKey(SortedKeyValueIterator iter) { - if (iter.hasTop()) - return iter.getTopKey().toString(); - return ""; - } - - private static final String columnFamiliesOptionName = "columnFamilies"; - private static final String notFlagOptionName = "notFlag"; - - /** - * @param columns - * @return encoded columns - * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use - * {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}. - */ - public static String encodeColumns(Text[] columns) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < columns.length; i++) { - sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i])))); - sb.append('\n'); - } - return sb.toString(); - } - - /** - * @param flags - * @return encoded flags - * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use - * {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}. - */ - public static String encodeBooleans(boolean[] flags) { - byte[] bytes = new byte[flags.length]; - for (int i = 0; i < flags.length; i++) { - if (flags[i]) - bytes[i] = 1; - else - bytes[i] = 0; - } - return new String(Base64.encodeBase64(bytes)); - } - - protected static Text[] decodeColumns(String columns) { - String[] columnStrings = columns.split("\n"); - Text[] columnTexts = new Text[columnStrings.length]; - for (int i = 0; i < columnStrings.length; i++) { - columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes())); - } - return columnTexts; - } - - /** - * to be made protected - * - * @param flags - * @return decoded flags - * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use - * {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}. 
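Editor's note: the encodeColumns/encodeBooleans pair above shuttles the intersection's column families and NOT flags through the iterator's string options: each column is Base64-encoded and the encoded terms are joined with '\n', while the flags travel as a Base64-encoded byte array. A round-trip sketch of the column encoding, using plain strings for brevity (class name and sample terms are illustrative):

import org.apache.commons.codec.binary.Base64;

public class ColumnOptionCodec {
    // Mirrors AndingIterator.encodeColumns: Base64 per column, '\n'-joined.
    static String encodeColumns(String[] columns) {
        StringBuilder sb = new StringBuilder();
        for (String column : columns) {
            sb.append(new String(Base64.encodeBase64(column.getBytes())));
            sb.append('\n');
        }
        return sb.toString();
    }

    // Mirrors AndingIterator.decodeColumns: split, then Base64-decode each term.
    static String[] decodeColumns(String encoded) {
        String[] parts = encoded.split("\n");
        String[] columns = new String[parts.length];
        for (int i = 0; i < parts.length; i++) {
            columns[i] = new String(Base64.decodeBase64(parts[i].getBytes()));
        }
        return columns;
    }

    public static void main(String[] args) {
        String opt = encodeColumns(new String[] { "t\0rain", "t\0snow" });
        System.out.println(decodeColumns(opt).length); // prints 2
    }
}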
- */ - public static boolean[] decodeBooleans(String flags) { - // return null of there were no flags - if (flags == null) - return null; - - byte[] bytes = Base64.decodeBase64(flags.getBytes()); - boolean[] bFlags = new boolean[bytes.length]; - for (int i = 0; i < bytes.length; i++) { - if (bytes[i] == 1) - bFlags[i] = true; - else - bFlags[i] = false; - } - return bFlags; - } - - @Override - public void init(SortedKeyValueIterator source, Map options, IteratorEnvironment env) throws IOException { - Text[] terms = decodeColumns(options.get(columnFamiliesOptionName)); - boolean[] notFlag = decodeBooleans(options.get(notFlagOptionName)); - - if (terms.length < 2) { - throw new IllegalArgumentException("IntersectionIterator requires two or more columns families"); - } - - // Scan the not flags. - // There must be at least one term that isn't negated - // And we are going to re-order such that the first term is not a ! term - if (notFlag == null) { - notFlag = new boolean[terms.length]; - for (int i = 0; i < terms.length; i++) - notFlag[i] = false; - } - if (notFlag[0]) { - for (int i = 1; i < notFlag.length; i++) { - if (notFlag[i] == false) { - Text swapFamily = new Text(terms[0]); - terms[0].set(terms[i]); - terms[i].set(swapFamily); - notFlag[0] = false; - notFlag[i] = true; - break; - } - } - if (notFlag[0]) { - throw new IllegalArgumentException("IntersectionIterator requires at lest one column family without not"); - } - } - - sources = new TermSource[terms.length]; - sources[0] = new TermSource(source, terms[0]); - for (int i = 1; i < terms.length; i++) { - sources[i] = new TermSource(source.deepCopy(env), terms[i], notFlag[i]); - } - sourcesCount = terms.length; - } - - @Override - public void seek(Range range, Collection seekColumnFamilies, boolean inclusive) throws IOException { - overallRange = new Range(range); - currentPartition = new Text(); - currentDocID.set(emptyByteArray); - - // seek each of the sources to the right column family within the row given by key - for (int i = 0; i < sourcesCount; i++) { - Key sourceKey; - if (range.getStartKey() != null) { - if (range.getStartKey().getColumnQualifier() != null) { - sourceKey = buildKey(getPartition(range.getStartKey()), sources[i].term, range.getStartKey().getColumnQualifier()); - } else { - sourceKey = buildKey(getPartition(range.getStartKey()), sources[i].term); - } - // Seek only to the term for this source as a column family - sources[i].iter.seek(new Range(sourceKey, true, null, false), sources[i].seekColfams, true); - } else { - // Seek only to the term for this source as a column family - sources[i].iter.seek(range, sources[i].seekColfams, true); - } - } - advanceToIntersection(); - } - - public void addSource(SortedKeyValueIterator source, IteratorEnvironment env, Text term, boolean notFlag) { - // Check if we have space for the added Source - if (sources == null) { - sources = new TermSource[1]; - } else { - // allocate space for node, and copy current tree. - // TODO: Should we change this to an ArrayList so that we can just add() ? - TermSource[] localSources = new TermSource[sources.length + 1]; - int currSource = 0; - for (TermSource myTerm : sources) { - // TODO: Do I need to call new here? or can I just re-use the term? - localSources[currSource] = new TermSource(myTerm); - currSource++; - } - sources = localSources; - } - sources[sourcesCount] = new TermSource(source.deepCopy(env), term, notFlag); - sourcesCount++; - } - - /** - * Encode the columns to be used when iterating. 
- * - * @param cfg - * @param columns - */ - public static void setColumnFamilies(IteratorSetting cfg, Text[] columns) { - if (columns.length < 2) - throw new IllegalArgumentException("Must supply at least two terms to intersect"); - cfg.addOption(AndingIterator.columnFamiliesOptionName, AndingIterator.encodeColumns(columns)); - } - - /** - * Encode columns and NOT flags indicating which columns should be negated (docIDs will be excluded if matching negated columns, instead - * of included). - * - * @param cfg - * @param columns - * @param notFlags - */ - public static void setColumnFamilies(IteratorSetting cfg, Text[] columns, boolean[] notFlags) { - if (columns.length < 2) - throw new IllegalArgumentException("Must supply at least two terms to intersect"); - if (columns.length != notFlags.length) - throw new IllegalArgumentException("columns and notFlags arrays must be the same length"); - setColumnFamilies(cfg, columns); - cfg.addOption(AndingIterator.notFlagOptionName, AndingIterator.encodeBooleans(notFlags)); - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java deleted file mode 100644 index a69b78a0d..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java +++ /dev/null @@ -1,322 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext.iterators; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.allChildrenAreNot; -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.findFirstNonNotChild; -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator; -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.isNotFlag; -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.pushChild; -import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.swapChildren; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; - -import mvm.rya.indexing.accumulo.freetext.ColumnPrefixes; -import mvm.rya.indexing.accumulo.freetext.query.ASTExpression; -import mvm.rya.indexing.accumulo.freetext.query.ASTTerm; -import mvm.rya.indexing.accumulo.freetext.query.ParseException; -import mvm.rya.indexing.accumulo.freetext.query.QueryParser; -import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants; -import mvm.rya.indexing.accumulo.freetext.query.SimpleNode; -import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError; - -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.data.ByteSequence; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.iterators.IteratorEnvironment; -import org.apache.accumulo.core.iterators.OptionDescriber; -import org.apache.accumulo.core.iterators.SortedKeyValueIterator; -import org.apache.accumulo.core.iterators.system.MultiIterator; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; - -public class BooleanTreeIterator implements SortedKeyValueIterator, OptionDescriber { - private static Logger logger = Logger.getLogger(BooleanTreeIterator.class); - - private static String queryOptionName = "query"; - - private SortedKeyValueIterator iter; - private SortedKeyValueIterator docSource; - - @Override - public void init(SortedKeyValueIterator source, Map options, IteratorEnvironment env) throws IOException { - - // pull out the query - String query = options.get(queryOptionName); - - // create the parse tree - SimpleNode root; - try { - root = QueryParser.parse(query); - } catch (ParseException e) { - // log and wrap in IOException - logger.error("ParseException encountered while parsing: " + query, e); - throw new IOException(e); - } catch (TokenMgrError e) { - // log and wrap in IOException - logger.error("TokenMgrError encountered while parsing: " + query, e); - throw new IOException(e); - } - - docSource = source.deepCopy(env); - iter = createIterator((SimpleNode) root.jjtGetChild(0), source, env); - } - - private SortedKeyValueIterator createIterator(SimpleNode root, SortedKeyValueIterator source, - IteratorEnvironment env) { - // if the root is only a single term, wrap it in an expression node - if (root instanceof ASTTerm) { - ASTExpression expression = new ASTExpression(QueryParserTreeConstants.JJTEXPRESSION); - expression.setNotFlag(false); - expression.setType(ASTExpression.AND); - - pushChild(expression, root); - root.jjtSetParent(expression); - - root = expression; - } - - // Pre-process the tree to compensate for iterator specific issues with certain topologies - preProcessTree(root); - - // Build an iterator tree - return 
createIteratorRecursive(root, source, env); - } - - private SortedKeyValueIterator createIteratorRecursive(SimpleNode node, SortedKeyValueIterator source, - IteratorEnvironment env) { - - Validate.isTrue(node instanceof ASTExpression, "node must be of type ASTExpression. Node is instance of " - + node.getClass().getName()); - - ASTExpression expression = (ASTExpression) node; - - if (expression.getType().equals(ASTExpression.AND)) { - return getAndIterator(node, source, env); - } - - if (expression.getType().equals(ASTExpression.OR)) { - return getOrIterator(node, source, env); - } - - throw new IllegalArgumentException("Expression is of unknown type: " + expression.getType()); - - } - - private MultiIterator getOrIterator(SimpleNode node, SortedKeyValueIterator source, IteratorEnvironment env) { - List> iters = new ArrayList>(); - - for (SimpleNode n : getNodeIterator(node)) { - if (n instanceof ASTExpression) { - iters.add(createIteratorRecursive(n, source, env)); - } else if (n instanceof ASTTerm) { - iters.add(getSimpleAndingIterator((ASTTerm) n, source, env)); - } else { - throw new IllegalArgumentException("Node is of unknown type: " + n.getClass().getName()); - } - } - - return new MultiIterator(iters, new Range()); - } - - private AndingIterator getAndIterator(SimpleNode node, SortedKeyValueIterator source, IteratorEnvironment env) { - - AndingIterator anding = new AndingIterator(); - - for (SimpleNode n : getNodeIterator(node)) { - boolean isNotFlag = isNotFlag(n); - if (n instanceof ASTExpression) { - anding.addSource(createIteratorRecursive(n, source, env), env, null, isNotFlag); - } else if (n instanceof ASTTerm) { - ASTTerm term = ((ASTTerm) n); - anding.addSource(source, env, getTermColFam(term), isNotFlag); - } else { - throw new IllegalArgumentException("Node is of unknown type: " + n.getClass().getName()); - } - } - - return anding; - } - - private static Text getTermColFam(ASTTerm termnode) { - String term = termnode.getTerm(); - if (term == null) { - // if the term is null, then I want all of the documents - return ColumnPrefixes.DOCS_CF_PREFIX; - } - if (term.contains("\0")) { - // if the term is contain a null char, then it's already formated for a CF - return new Text(term); - } - - // otherwise, point to the term CF - return ColumnPrefixes.getTermColFam(term.toLowerCase()); - } - - private AndingIterator getSimpleAndingIterator(ASTTerm node, SortedKeyValueIterator source, IteratorEnvironment env) { - Validate.isTrue(!node.isNotFlag(), "Simple Anding node must not have \"not\" flag set"); - - AndingIterator anding = new AndingIterator(); - anding.addSource(source, env, getTermColFam(node), false); - return anding; - } - - /** - * Handle "lonely nots" (i.e. expressions with only nots), "or" statements containing nots, and make sure that the first term in an - * "and" statement is not a not. This is due to implementation specific limitations of the iterators. - *
- * For example:
- * <ul>
- * <li>lonely nots: (!a & !b) -> [all] & !a & !b</li>
- * <li>"or" nots: (!a | b) -> ( ([all] & !a) | b)</li>
- * <li>reorder "and" nots: (!a & b) -> ( b & !a )</li>
- * </ul>
- **/ - public static void preProcessTree(SimpleNode s) { - for (SimpleNode child : getNodeIterator(s)) { - preProcessTree(child); - } - - if (s instanceof ASTExpression) { - ASTExpression expression = (ASTExpression) s; - - if (expression.getType().equals(ASTExpression.AND)) { - if (allChildrenAreNot(expression)) { - // lonely nots: (!a & !b) -> [all] & !a & !b - ASTTerm allDocsTerm = createAllDocTermNode(); - pushChild(expression, allDocsTerm); - } else if (isNotFlag(expression.jjtGetChild(0))) { - // reorder "and" nots: (!a & b) -> ( b & !a ) - int firstNonNotChild = findFirstNonNotChild(expression); - swapChildren(expression, 0, firstNonNotChild); - } - } - - if (expression.getType().equals(ASTExpression.OR)) { - for (int i = 0; i < expression.jjtGetNumChildren(); i++) { - SimpleNode child = (SimpleNode) expression.jjtGetChild(i); - if (isNotFlag(child)) { - // "or" nots: (!a | b) -> ( ([all] & !a) | b) - // create the new expression - ASTExpression newExpression = new ASTExpression(QueryParserTreeConstants.JJTEXPRESSION); - newExpression.setNotFlag(false); - newExpression.setType(ASTExpression.AND); - pushChild(newExpression, child); - pushChild(newExpression, createAllDocTermNode()); - - // tie the new expression to the old one - newExpression.jjtSetParent(expression); - expression.jjtAddChild(newExpression, i); - } - } - } - } - - } - - public static ASTTerm createAllDocTermNode() { - ASTTerm t = new ASTTerm(QueryParserTreeConstants.JJTTERM); - t.setNotFlag(false); - t.setType(ASTTerm.TERM); - // note: a "null" signifies "all docs" should be returned. - t.setTerm(null); - return t; - } - - @Override - public boolean hasTop() { - return iter.hasTop(); - } - - @Override - public void next() throws IOException { - iter.next(); - if (iter.hasTop()) { - seekDocSource(iter.getTopKey()); - } - } - - @Override - public void seek(Range range, Collection columnFamilies, boolean inclusive) throws IOException { - iter.seek(range, columnFamilies, inclusive); - if (iter.hasTop()) { - seekDocSource(iter.getTopKey()); - } - } - - private void seekDocSource(Key key) throws IOException { - Key docKey = new Key(key.getRow(), ColumnPrefixes.DOCS_CF_PREFIX, key.getColumnQualifier()); - docSource.seek(new Range(docKey, true, null, false), Collections. 
emptyList(), false); - } - - @Override - public Key getTopKey() { - // from intersecting iterator: - // RowID: shardID - // CF: (empty) - // CQ: docID - return iter.getTopKey(); - } - - @Override - public Value getTopValue() { - if (!iter.hasTop()) { - throw new NoSuchElementException(); - } - - return docSource.getTopValue(); - } - - @Override - public SortedKeyValueIterator deepCopy(IteratorEnvironment env) { - throw new UnsupportedOperationException(); - } - - public static void setQuery(IteratorSetting cfg, String query) { - cfg.addOption(BooleanTreeIterator.queryOptionName, query); - } - - @Override - public IteratorOptions describeOptions() { - return new IteratorOptions("FreeTextBooleanTree", "Perform a FreeText Query on properly formated table", - Collections.singletonMap(queryOptionName, "the free text query"), - null); - } - - @Override - public boolean validateOptions(Map options) { - String q = options.get(queryOptionName); - if (q == null || q.isEmpty()) - throw new IllegalArgumentException(queryOptionName + " must not be empty"); - return true; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java deleted file mode 100644 index 95783e54f..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java +++ /dev/null @@ -1,63 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * This is a slightly modified version of the ASTExpression file created by JavaCC. This version adds more state to the standard ASTTerm - * file including a "type", and "notFlag". 
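Editor's note: because the BooleanTreeIterator above is driven entirely by its "query" option, attaching it to a scan takes only an IteratorSetting plus setQuery. A sketch of that wiring, assuming a live Accumulo Scanner (the priority and iterator name are arbitrary choices):

import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;

import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator;

public class FreeTextScanConfig {
    // Attach the boolean-tree iterator to an existing scanner.
    public static void attach(Scanner scanner, String query) {
        IteratorSetting cfg = new IteratorSetting(30, "freeTextBooleanTree",
                BooleanTreeIterator.class);
        BooleanTreeIterator.setQuery(cfg, query); // stores the "query" option
        scanner.addScanIterator(cfg);
    }
}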
- */ -public class ASTExpression extends SimpleNode { - public static final String AND = "AND"; - public static final String OR = "OR"; - - private String type = ""; - private boolean notFlag = false; - - public ASTExpression(int id) { - super(id); - } - - public ASTExpression(QueryParser p, int id) { - super(p, id); - } - - public void setType(String type) { - this.type = type; - } - - public String getType() { - return type; - } - - public boolean isNotFlag() { - return notFlag; - } - - public void setNotFlag(boolean notFlag) { - this.notFlag = notFlag; - } - - @Override - public String toString() { - return super.toString() + " [type: " + type + ", notFlag: " + notFlag + "]"; - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java deleted file mode 100644 index 27edaac35..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java +++ /dev/null @@ -1,210 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; - -public class ASTNodeUtils { - - /** - * Serialize a node (and it's children) to a parsable string. - * - * @param s - * @return - */ - public static String serializeExpression(Node s) { - if (s instanceof ASTTerm) { - ASTTerm a = (ASTTerm) s; - return (a.isNotFlag() ? "!" : "") + " " + a.getTerm(); - } - - String prefix = ""; - String suffix = ""; - String join = " "; - if (s instanceof ASTExpression) { - ASTExpression a = (ASTExpression) s; - prefix = (a.isNotFlag() ? "!" : "") + "("; - suffix = ")"; - join = " " + a.getType() + " "; - } - - List children = new ArrayList(); - for (int i = 0; i < s.jjtGetNumChildren(); i++) { - children.add(serializeExpression(s.jjtGetChild(i))); - } - return prefix + StringUtils.join(children, join) + suffix; - - } - - /** - * count the number of terms in this query tree. - * - * @param node - * @return - */ - public static int termCount(Node node) { - if (node instanceof SimpleNode) { - int count = 0; - for (SimpleNode n : getNodeIterator((SimpleNode) node)) { - count += termCount(n); - } - return count; - } else if (node instanceof ASTTerm) { - return 1; - } else { - throw new IllegalArgumentException("Node is of unknown type: " + node.getClass().getName()); - } - } - - /** - * Add the child as the parent's first child. 
- * - * @param parent - * @param child - */ - public static void pushChild(SimpleNode parent, SimpleNode child) { - // note: this implementation is very coupled with the SimpleNode jjt implementation - int parentSize = parent.jjtGetNumChildren(); - - // expand the parent node - parent.jjtAddChild(null, parentSize); - - // get the current head child - Node currentHeadChild = parent.jjtGetChild(0); - - // set the parameter as the parent's first child - parent.jjtAddChild(child, 0); - - // add the former head child to the end of the list - if (currentHeadChild != null) { - parent.jjtAddChild(currentHeadChild, parentSize); - } - - // tie the child to the parent - child.jjtSetParent(parent); - - } - - /** - * Get the index of the child, -1 if child not found. - * - * @param parent - * @param child - */ - public static int getChildIndex(SimpleNode parent, SimpleNode child) { - int parentSize = parent.jjtGetNumChildren(); - - for (int i = 0; i < parentSize; i++) { - if (child.equals(parent.jjtGetChild(i))) { - return i; - } - } - - return -1; - } - - /** - * return true is all of the node's children have the not flag enabled. - * - * @param node - * @return - */ - public static boolean allChildrenAreNot(ASTExpression node) { - for (SimpleNode child : getNodeIterator(node)) { - if (!isNotFlag(child)) { - return false; - } - } - return true; - } - - /** - * return the node's not flag value. node must be of type {@link ASTTerm} or {@link ASTExpression} - * - * @param node - * @return - */ - public static boolean isNotFlag(Node node) { - if (node instanceof ASTExpression) { - return ((ASTExpression) node).isNotFlag(); - } else if (node instanceof ASTTerm) { - return ((ASTTerm) node).isNotFlag(); - } else { - throw new IllegalArgumentException("Node is of unknown type: " + node.getClass().getName()); - } - } - - public static Iterable getNodeIterator(final SimpleNode n) { - return new Iterable() { - - @Override - public Iterator iterator() { - return new Iterator() { - int pointer = 0; - - @Override - public boolean hasNext() { - return pointer < n.jjtGetNumChildren(); - } - - @Override - public SimpleNode next() { - Node rtn = n.jjtGetChild(pointer); - pointer++; - return (SimpleNode) rtn; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - }; - } - - public static void swapChildren(ASTExpression parent, int childOneIndex, int childTwoIndex) { - Validate.isTrue(childOneIndex > -1 && childOneIndex < parent.jjtGetNumChildren()); - Validate.isTrue(childTwoIndex > -1 && childTwoIndex < parent.jjtGetNumChildren()); - - Node childOne = parent.jjtGetChild(childOneIndex); - Node childTwo = parent.jjtGetChild(childTwoIndex); - parent.jjtAddChild(childOne, childTwoIndex); - parent.jjtAddChild(childTwo, childOneIndex); - } - - public static int findFirstNonNotChild(ASTExpression expression) { - for (int i = 0; i < expression.jjtGetNumChildren(); i++) { - if (!isNotFlag(expression.jjtGetChild(i))) { - return i; - } - } - return -1; - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java deleted file mode 100644 index 71ff16a3e..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java +++ /dev/null @@ -1,917 +0,0 @@ -/* Generated By:JJTree: Do not edit this line. 
ASTSimpleNode.java Version 4.3 */
-/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
-
-
-public
-class ASTSimpleNode extends SimpleNode {
-    public ASTSimpleNode(int id) {
-        super(id);
-    }
-
-    public ASTSimpleNode(QueryParser p, int id) {
-        super(p, id);
-    }
-
-}
-/* JavaCC - OriginalChecksum=8a57fc385ee56c7039cbbc4132eb8e0c (do not edit this line) */
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
deleted file mode 100644
index 62320969e..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-/**
- * This is a slightly modified version of the ASTTerm file created by JavaCC. This version adds more state to the standard ASTTerm file
- * including a "term", "type", and "notFlag".
- */
-public class ASTTerm extends SimpleNode {
-    public static final String WILDTERM = "WILDTERM";
-    public static final String PREFIXTERM = "PREFIXTERM";
-    public static final String QUOTED = "QUOTED";
-    public static final String TERM = "TERM";
-
-    private String term = "";
-    private boolean notFlag = false;
-    private String type = "";
-
-    public ASTTerm(int id) {
-        super(id);
-    }
-
-    public ASTTerm(QueryParser p, int id) {
-        super(p, id);
-    }
-
-    @Override
-    public String toString() {
-        return super.toString() + "[notFlag: " + notFlag + " term: " + term + " type: " + type + "]";
-    }
-
-    @Override
-    public String toString(String prefix) {
-        return super.toString(prefix);
-    }
-
-    public String getTerm() {
-        return term;
-    }
-
-    public void setTerm(String term) {
-        this.term = term;
-    }
-
-    public boolean isNotFlag() {
-        return notFlag;
-    }
-
-    public void setNotFlag(boolean notFlag) {
-        this.notFlag = notFlag;
-    }
-
-    public void setType(String type) {
-        this.type = type;
-    }
-
-    public String getType() {
-        return type;
-    }
-}
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
deleted file mode 100644
index dfcc429d7..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
+++ /dev/null
@@ -1,1024 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. JJTQueryParserState.java Version 5.0 */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
-
-
-public class JJTQueryParserState {
-  private java.util.List<Node> nodes;
-  private java.util.List<Integer> marks;
-
-  private int sp;        // number of nodes on stack
-  private int mk;        // current mark
-  private boolean node_created;
-
-  public JJTQueryParserState() {
-    nodes = new java.util.ArrayList<Node>();
-    marks = new java.util.ArrayList<Integer>();
-    sp = 0;
-    mk = 0;
-  }
-
-  /* Determines whether the current node was actually closed and
-     pushed. This should only be called in the final user action of a
-     node scope. */
-  public boolean nodeCreated() {
-    return node_created;
-  }
-
-  /* Call this to reinitialize the node stack. It is called
-     automatically by the parser's ReInit() method. */
-  public void reset() {
-    nodes.clear();
-    marks.clear();
-    sp = 0;
-    mk = 0;
-  }
-
-  /* Returns the root node of the AST. It only makes sense to call
-     this after a successful parse. */
-  public Node rootNode() {
-    return nodes.get(0);
-  }
-
-  /* Pushes a node on to the stack. */
-  public void pushNode(Node n) {
-    nodes.add(n);
-    ++sp;
-  }
-
-  /* Returns the node on the top of the stack, and removes it from the
-     stack. */
-  public Node popNode() {
-    if (--sp < mk) {
-      mk = marks.remove(marks.size()-1);
-    }
-    return nodes.remove(nodes.size()-1);
-  }
-
-  /* Returns the node currently on the top of the stack. */
-  public Node peekNode() {
-    return nodes.get(nodes.size()-1);
-  }
-
-  /* Returns the number of children on the stack in the current node
-     scope. */
-  public int nodeArity() {
-    return sp - mk;
-  }
-
-  public void clearNodeScope(Node n) {
-    while (sp > mk) {
-      popNode();
-    }
-    mk = marks.remove(marks.size()-1);
-  }
-
-  public void openNodeScope(Node n) {
-    marks.add(mk);
-    mk = sp;
-    n.jjtOpen();
-  }
-
-  /* A definite node is constructed from a specified number of
-     children. That number of nodes are popped from the stack and
-     made the children of the definite node. Then the definite node
-     is pushed on to the stack. */
-  public void closeNodeScope(Node n, int num) {
-    mk = marks.remove(marks.size()-1);
-    while (num-- > 0) {
-      Node c = popNode();
-      c.jjtSetParent(n);
-      n.jjtAddChild(c, num);
-    }
-    n.jjtClose();
-    pushNode(n);
-    node_created = true;
-  }
-
-  /* A conditional node is constructed if its condition is true. All
-     the nodes that have been pushed since the node was opened are
-     made children of the conditional node, which is then pushed
-     on to the stack. If the condition is false the node is not
-     constructed and they are left on the stack. */
-  public void closeNodeScope(Node n, boolean condition) {
-    if (condition) {
-      int a = nodeArity();
-      mk = marks.remove(marks.size()-1);
-      while (a-- > 0) {
-        Node c = popNode();
-        c.jjtSetParent(n);
-        n.jjtAddChild(c, a);
-      }
-      n.jjtClose();
-      pushNode(n);
-      node_created = true;
-    } else {
-      mk = marks.remove(marks.size()-1);
-      node_created = false;
-    }
-  }
-}
-/* JavaCC - OriginalChecksum=3658fa072c0d78e6abb5a6e2b4509dc4 (do not edit this line) */
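The node stack deleted above is easiest to follow from the caller's side: a JJTree-generated production opens a scope, lets each sub-production push one node, and then closes the scope, which pops those children, reparents them, and pushes the finished node. The sketch below is illustrative only and appears nowhere in this patch; MiniNode and Demo are hypothetical names, assuming only the JJTQueryParserState class above and the Node interface deleted in the next hunk.

// A minimal sketch, assuming the generated JJTQueryParserState and Node
// types from this patch. MiniNode and Demo are hypothetical names.
class MiniNode implements Node {
  private Node parent;
  private final java.util.List<Node> children = new java.util.ArrayList<Node>();
  public void jjtOpen() {}
  public void jjtClose() {}
  public void jjtSetParent(Node n) { parent = n; }
  public Node jjtGetParent() { return parent; }
  public void jjtAddChild(Node n, int i) {
    // closeNodeScope delivers children right-to-left (highest index first),
    // so grow the list as needed before setting the slot.
    while (children.size() <= i) children.add(null);
    children.set(i, n);
  }
  public Node jjtGetChild(int i) { return children.get(i); }
  public int jjtGetNumChildren() { return children.size(); }
}

class Demo {
  public static void main(String[] args) {
    JJTQueryParserState jjtree = new JJTQueryParserState();
    Node and = new MiniNode();
    jjtree.openNodeScope(and);        // remember the stack depth on entry
    jjtree.pushNode(new MiniNode());  // child 1 (a sub-production would push this)
    jjtree.pushNode(new MiniNode());  // child 2
    jjtree.closeNodeScope(and, 2);    // definite close: pop 2, reparent, push 'and'
    System.out.println(and.jjtGetNumChildren()); // 2
    System.out.println(jjtree.peekNode() == and); // true
  }
}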
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
deleted file mode 100644
index 1ef0bad75..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
+++ /dev/null
@@ -1,937 +0,0 @@
-/* Generated By:JJTree: Do not edit this line. Node.java Version 4.3 */
-/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/* All AST nodes must implement this interface. It provides basic
-   machinery for constructing the parent and child relationships
-   between nodes. */
-
-public
-interface Node {
-
-  /** This method is called after the node has been made the current
-      node. It indicates that child nodes can now be added to it. */
-  public void jjtOpen();
-
-  /** This method is called after all the child nodes have been
-      added. */
-  public void jjtClose();
-
-  /** This pair of methods is used to inform the node of its
-      parent.
-      parent. */
-  public void jjtSetParent(Node n);
-  public Node jjtGetParent();
-
-  /** This method tells the node to add its argument to the node's
-      list of children. */
-  public void jjtAddChild(Node n, int i);
-
-  /** This method returns a child node. The children are numbered
-      from zero, left to right. */
-  public Node jjtGetChild(int i);
-
-  /** Return the number of children the node has. */
-  public int jjtGetNumChildren();
-}
-/* JavaCC - OriginalChecksum=e66efa9c359bf70af0cdb4f33bea0630 (do not edit this line) */
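The conditional overload of closeNodeScope deleted earlier pairs with clearNodeScope: a node is built only when its condition holds, and a parse failure discards any half-built children back to the scope's mark. Below is a simplified sketch of that guard pattern, not code from this patch (the real JJTree-generated productions track an extra node-created flag); parseOr is a hypothetical production and MiniNode is the stand-in implementation from the earlier sketch.

class ScopeGuardDemo {
  // Simplified sketch of the guard a JJTree-generated production places
  // around a node scope; illustrative only, not code from this patch.
  static Node parseOr(JJTQueryParserState jjtree) {
    Node n = new MiniNode();            // hypothetical Node impl from the sketch above
    jjtree.openNodeScope(n);
    try {
      jjtree.pushNode(new MiniNode());  // stand-in for one successfully parsed child
      // Conditional close: only build the node when it would have more than
      // one child; otherwise the lone child is left on the stack untouched.
      jjtree.closeNodeScope(n, jjtree.nodeArity() > 1);
    } catch (RuntimeException e) {
      jjtree.clearNodeScope(n);         // discard partially built children
      throw e;
    }
    return jjtree.popNode();            // here: the single child, since the condition was false
  }
}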
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
deleted file mode 100644
index 92007709c..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
+++ /dev/null
@@ -1,1088 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 5.0 */
-/* JavaCCOptions:KEEP_LINE_COL=null */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
-
-
-/**
- * This exception is thrown when parse errors are encountered.
- * You can explicitly create objects of this exception type by
- * calling the method generateParseException in the generated
- * parser.
- *
- * You can modify this class to customize your error reporting
- * mechanisms so long as you retain the public fields.
- */
-public class ParseException extends Exception {
-
- /**
- * The version identifier for this Serializable class.
- * Increment only if the serialized form of the
- * class changes.
- */
- private static final long serialVersionUID = 1L;
-
- /**
- * This constructor is used by the method "generateParseException"
- * in the generated parser. Calling this constructor generates
- * a new object of this type with the fields "currentToken",
- * "expectedTokenSequences", and "tokenImage" set.
- */
- public ParseException(Token currentTokenVal,
- int[][] expectedTokenSequencesVal,
- String[] tokenImageVal
- )
- {
- super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal));
- currentToken = currentTokenVal;
- expectedTokenSequences = expectedTokenSequencesVal;
- tokenImage = tokenImageVal;
- }
-
- /**
- * The following constructors are for use by you for whatever
- * purpose you can think of. Constructing the exception in this
- * manner makes the exception behave in the normal way - i.e., as
- * documented in the class "Throwable". The fields "errorToken",
- * "expectedTokenSequences", and "tokenImage" do not contain
- * relevant information. The JavaCC generated code does not use
- * these constructors.
- */
-
- public ParseException() {
- super();
- }
-
- /** Constructor with message. */
- public ParseException(String message) {
- super(message);
- }
-
-
- /**
- * This is the last token that has been consumed successfully. If
- * this object has been created due to a parse error, the token
- * following this token will (therefore) be the first error token.
- */
- public Token currentToken;
-
- /**
- * Each entry in this array is an array of integers. Each array
- * of integers represents a sequence of tokens (by their ordinal
- * values) that is expected at this point of the parse.
- */
- public int[][] expectedTokenSequences;
-
- /**
- * This is a reference to the "tokenImage" array of the generated
- * parser within which the parse error occurred. This array is
- * defined in the generated ...Constants interface.
- */
- public String[] tokenImage;
-
- /**
- * It uses "currentToken" and "expectedTokenSequences" to generate a parse
- * error message and returns it. If this object has been created
- * due to a parse error, and you do not catch it (it gets thrown
- * from the parser) the correct error message
- * gets displayed.
- */
- private static String initialise(Token currentToken,
- int[][] expectedTokenSequences,
- String[] tokenImage) {
- String eol = System.getProperty("line.separator", "\n");
- StringBuffer expected = new StringBuffer();
- int maxSize = 0;
- for (int i = 0; i < expectedTokenSequences.length; i++) {
- if (maxSize < expectedTokenSequences[i].length) {
- maxSize = expectedTokenSequences[i].length;
- }
- for (int j = 0; j < expectedTokenSequences[i].length; j++) {
- expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' ');
- }
- if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
- expected.append("...");
- }
- expected.append(eol).append(" ");
- }
- String retval = "Encountered \"";
- Token tok = currentToken.next;
- for (int i = 0; i < maxSize; i++) {
- if (i != 0) retval += " ";
- if (tok.kind == 0) {
- retval += tokenImage[0];
- break;
- }
- retval += " " + tokenImage[tok.kind];
- retval += " \"";
- retval += add_escapes(tok.image);
- retval += " \"";
- tok = tok.next;
- }
- retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
- retval += "." + eol;
- if (expectedTokenSequences.length == 1) {
- retval += "Was expecting:" + eol + " ";
- } else {
- retval += "Was expecting one of:" + eol + " ";
- }
- retval += expected.toString();
- return retval;
- }
-
- /**
- * The end of line string for this machine.
- */
- protected String eol = System.getProperty("line.separator", "\n");
-
- /**
- * Used to convert raw characters to their escaped version
- * when these raw versions cannot be used as part of an ASCII
- * string literal.
- */
- static String add_escapes(String str) {
- StringBuffer retval = new StringBuffer();
- char ch;
- for (int i = 0; i < str.length(); i++) {
- switch (str.charAt(i))
- {
- case 0 :
- continue;
- case '\b':
- retval.append("\\b");
- continue;
- case '\t':
- retval.append("\\t");
- continue;
- case '\n':
- retval.append("\\n");
- continue;
- case '\f':
- retval.append("\\f");
- continue;
- case '\r':
- retval.append("\\r");
- continue;
- case '\"':
- retval.append("\\\"");
- continue;
- case '\'':
- retval.append("\\\'");
- continue;
- case '\\':
- retval.append("\\\\");
- continue;
- default:
- if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
- String s = "0000" + Integer.toString(ch, 16);
- retval.append("\\u" + s.substring(s.length() - 4, s.length()));
- } else {
- retval.append(ch);
- }
- continue;
- }
- }
- return retval.toString();
- }
-
-}
-/* JavaCC - OriginalChecksum=b2d839f0eac1c1a2c3ce06515ce25b20 (do not edit this line) */
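As the class javadoc above notes, a thrown ParseException already carries a formatted message (built by initialise) plus the raw currentToken / expectedTokenSequences / tokenImage fields. A minimal sketch of a caller follows; the static QueryParser.parse(String) entry point and the sample line/column reporting are assumptions for illustration, not taken from this patch:

    // Surface the diagnostics carried by a ParseException.
    static void tryParse(String query) {
        try {
            QueryParser.parse(query);                     // assumed static entry point
        } catch (ParseException e) {
            System.err.println(e.getMessage());           // "Encountered ... Was expecting ..."
            Token bad = e.currentToken.next;              // first error token, per the javadoc
            System.err.println("at " + bad.beginLine + ":" + bad.beginColumn);
        }
    }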
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
deleted file mode 100644
index 65e712547..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
+++ /dev/null
@@ -1,1293 +0,0 @@
-/* Generated By:JJTree&JavaCC: Do not edit this line. QueryParser.java */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.StringReader; - -public class QueryParser/*@bgen(jjtree)*/implements QueryParserTreeConstants, QueryParserConstants {/*@bgen(jjtree)*/ - protected JJTQueryParserState jjtree = new JJTQueryParserState(); - // Helper method to parse Strings (instead of streams) - public static SimpleNode parse(String query) throws ParseException, TokenMgrError - { - QueryParser parser = new QueryParser(new StringReader(query)); - return parser.Start(); - } - - final public SimpleNode Start() throws ParseException { - /*@bgen(jjtree) SimpleNode */ - ASTSimpleNode jjtn000 = new ASTSimpleNode(JJTSIMPLENODE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); - try { - OrExpression(); - jj_consume_token(0); - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - {if (true) return jjtn000;} - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } - throw new Error("Missing return statement in function"); - } - - final public void OrExpression() throws ParseException { - /*@bgen(jjtree) #Expression(> 1) */ - ASTExpression jjtn000 = new ASTExpression(JJTEXPRESSION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);jjtn000.setType(ASTExpression.OR); - try { - AndExpression(); - label_1: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case OR: - ; - break; - default: - jj_la1[0] = jj_gen; - break label_1; - } - jj_consume_token(OR); - AndExpression(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, jjtree.nodeArity() > 1); - } - } - } - - final public void AndExpression() throws ParseException { - /*@bgen(jjtree) #Expression(> 1) */ - ASTExpression jjtn000 = new ASTExpression(JJTEXPRESSION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000);jjtn000.setType(ASTExpression.AND); - try { - Term(); - label_2: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AND: - case NOT: - case LPAREN: - case QUOTED: - case TERM: - case PREFIXTERM: - case WILDTERM: - ; - break; - default: - jj_la1[1] = jj_gen; - break label_2; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AND: - jj_consume_token(AND); - break; - default: - jj_la1[2] = jj_gen; - ; - } - Term(); - } - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - {if (true) throw (RuntimeException)jjte000;} - } - if (jjte000 instanceof ParseException) { - {if (true) throw (ParseException)jjte000;} - } - {if (true) throw (Error)jjte000;} - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, jjtree.nodeArity() > 1); - } - } - } - - final public void Term() throws ParseException { - Token t; boolean notFlag = false; String type = ""; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case NOT: - 
jj_consume_token(NOT); - notFlag = true; - break; - default: - jj_la1[3] = jj_gen; - ; - } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case QUOTED: - case TERM: - case PREFIXTERM: - case WILDTERM: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case TERM: - t = jj_consume_token(TERM); - type = ASTTerm.TERM; - break; - case WILDTERM: - t = jj_consume_token(WILDTERM); - type = ASTTerm.WILDTERM; - break; - case PREFIXTERM: - t = jj_consume_token(PREFIXTERM); - type = ASTTerm.PREFIXTERM; - break; - case QUOTED: - t = jj_consume_token(QUOTED); - type = ASTTerm.QUOTED; - break; - default: - jj_la1[4] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - ASTTerm jjtn001 = new ASTTerm(JJTTERM); - boolean jjtc001 = true; - jjtree.openNodeScope(jjtn001); - try { - jjtree.closeNodeScope(jjtn001, true); - jjtc001 = false; - jjtn001.setTerm(t.image); jjtn001.setNotFlag(notFlag); jjtn001.setType(type); - } finally { - if (jjtc001) { - jjtree.closeNodeScope(jjtn001, true); - } - } - break; - case LPAREN: - jj_consume_token(LPAREN); - OrExpression(); - jj_consume_token(RPAREN); - // pass on the notFlag state to the sub-expression - // note: the sub-expression might be a term (eg, "a" is a term in "!(!a)") - { - if (notFlag) { - Node n = jjtree.peekNode(); - if (n instanceof ASTExpression) { - boolean v = ((ASTExpression)n).isNotFlag(); - ((ASTExpression)n).setNotFlag(v ^ notFlag); - } - if (n instanceof ASTTerm) { - boolean v = ((ASTTerm)n).isNotFlag(); - ((ASTTerm)n).setNotFlag(v ^ notFlag); - } - } - } - break; - default: - jj_la1[5] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - - /** Generated Token Manager. */ - public QueryParserTokenManager token_source; - SimpleCharStream jj_input_stream; - /** Current token. */ - public Token token; - /** Next token. */ - public Token jj_nt; - private int jj_ntk; - private int jj_gen; - final private int[] jj_la1 = new int[6]; - static private int[] jj_la1_0; - static { - jj_la1_init_0(); - } - private static void jj_la1_init_0() { - jj_la1_0 = new int[] {0x40,0x75a0,0x20,0x80,0x7400,0x7500,}; - } - - /** Constructor with InputStream. */ - public QueryParser(java.io.InputStream stream) { - this(stream, null); - } - /** Constructor with InputStream and supplied encoding */ - public QueryParser(java.io.InputStream stream, String encoding) { - try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source = new QueryParserTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream stream) { - ReInit(stream, null); - } - /** Reinitialise. */ - public void ReInit(java.io.InputStream stream, String encoding) { - try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - /** Constructor. */ - public QueryParser(java.io.Reader stream) { - jj_input_stream = new SimpleCharStream(stream, 1, 1); - token_source = new QueryParserTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - /** Reinitialise. 
*/ - public void ReInit(java.io.Reader stream) { - jj_input_stream.ReInit(stream, 1, 1); - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - /** Constructor with generated Token Manager. */ - public QueryParser(QueryParserTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - /** Reinitialise. */ - public void ReInit(QueryParserTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jjtree.reset(); - jj_gen = 0; - for (int i = 0; i < 6; i++) jj_la1[i] = -1; - } - - private Token jj_consume_token(int kind) throws ParseException { - Token oldToken; - if ((oldToken = token).next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - if (token.kind == kind) { - jj_gen++; - return token; - } - token = oldToken; - jj_kind = kind; - throw generateParseException(); - } - - -/** Get the next Token. */ - final public Token getNextToken() { - if (token.next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - jj_gen++; - return token; - } - -/** Get the specific Token. */ - final public Token getToken(int index) { - Token t = token; - for (int i = 0; i < index; i++) { - if (t.next != null) t = t.next; - else t = t.next = token_source.getNextToken(); - } - return t; - } - - private int jj_ntk() { - if ((jj_nt=token.next) == null) - return (jj_ntk = (token.next=token_source.getNextToken()).kind); - else - return (jj_ntk = jj_nt.kind); - } - - private java.util.List jj_expentries = new java.util.ArrayList(); - private int[] jj_expentry; - private int jj_kind = -1; - - /** Generate ParseException. */ - public ParseException generateParseException() { - jj_expentries.clear(); - boolean[] la1tokens = new boolean[16]; - if (jj_kind >= 0) { - la1tokens[jj_kind] = true; - jj_kind = -1; - } - for (int i = 0; i < 6; i++) { - if (jj_la1[i] == jj_gen) { - for (int j = 0; j < 32; j++) { - if ((jj_la1_0[i] & (1<<j)) != 0) { - la1tokens[j] = true; - } - } - } - } - for (int i = 0; i < 16; i++) { - if (la1tokens[i]) { - jj_expentry = new int[1]; - jj_expentry[0] = i; - jj_expentries.add(jj_expentry); - } - } - int[][] exptokseq = new int[jj_expentries.size()][]; - for (int i = 0; i < jj_expentries.size(); i++) { - exptokseq[i] = (int[])jj_expentries.get(i); - } - return new ParseException(token, exptokseq, tokenImage); - } - - /** Enable tracing. */ - final public void enable_tracing() { - } - - /** Disable tracing. */ - final public void disable_tracing() { - } - -} - SKIP : /* Ignore Whitespace */ { " " | "\t" | "\n" | "\r" -} -TOKEN : { - <AND: "and"> -| <OR: "or"> -| <NOT: ("not" | "!")> -| <LPAREN: "("> -| <RPAREN: ")"> -| <QUOTED: "\"" (<_QUOTED_CHAR>)* "\""> -| <#_QUOTED_CHAR: ~[ "\""] > -| <TERM: <_TERM_CHAR> (<_TERM_CHAR>)* > -| <PREFIXTERM: "*" <TERM> > -| <WILDTERM: <TERM> "*" > -| <#_TERM_CHAR: ~[ " ", "\t", "\n", "\r", "*", "(", ")", "!"] > -} - SimpleNode Start() : {/*@bgen(jjtree) SimpleNode */ - ASTSimpleNode jjtn000 = new ASTSimpleNode(JJTSIMPLENODE); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/} {/*@bgen(jjtree) SimpleNode */ - try { -/*@egen*/ - // "or"s have the lowest order of operations, so they will be highest on the tree. Start with them. 
OrExpression() < EOF >/*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn000, true); - jjtc000 = false; - } -/*@egen*/ { return jjtn000; }/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, true); - } - } -/*@egen*/ } - -void OrExpression() : -{/*@bgen(jjtree) #Expression(> 1) */ - ASTExpression jjtn000 = new ASTExpression(JJTEXPRESSION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ jjtn000.setType(ASTExpression.OR); } -{/*@bgen(jjtree) #Expression(> 1) */ - try { -/*@egen*/ - AndExpression() (< OR > AndExpression())*/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, jjtree.nodeArity() > 1); - } - } -/*@egen*/ } - -void AndExpression() : {/*@bgen(jjtree) #Expression(> 1) */ - ASTExpression jjtn000 = new ASTExpression(JJTEXPRESSION); - boolean jjtc000 = true; - jjtree.openNodeScope(jjtn000); -/*@egen*/ jjtn000.setType(ASTExpression.AND); } -{/*@bgen(jjtree) #Expression(> 1) */ - try { -/*@egen*/ Term() ([< AND >] Term())*/*@bgen(jjtree)*/ - } catch (Throwable jjte000) { - if (jjtc000) { - jjtree.clearNodeScope(jjtn000); - jjtc000 = false; - } else { - jjtree.popNode(); - } - if (jjte000 instanceof RuntimeException) { - throw (RuntimeException)jjte000; - } - if (jjte000 instanceof ParseException) { - throw (ParseException)jjte000; - } - throw (Error)jjte000; - } finally { - if (jjtc000) { - jjtree.closeNodeScope(jjtn000, jjtree.nodeArity() > 1); - } - } -/*@egen*/ -} - void Term() : { Token t; boolean notFlag = false; String type = ""; } { - // Update the notFlag if a "not" is present - [ < NOT > { notFlag = true; } ] - - ( // Create a term, if a term is present - ( t = < TERM > { type = ASTTerm.TERM; } | t = < WILDTERM > { type = ASTTerm.WILDTERM; } - | t = < PREFIXTERM > { type = ASTTerm.PREFIXTERM; } - | t = < QUOTED > { type = ASTTerm.QUOTED; } - )/*@bgen(jjtree) #Term(true) */ - { - ASTTerm jjtn001 = new ASTTerm(JJTTERM); - boolean jjtc001 = true; - jjtree.openNodeScope(jjtn001); - } - try { -/*@egen*//*@bgen(jjtree)*/ - { - jjtree.closeNodeScope(jjtn001, true); - jjtc001 = false; - } -/*@egen*/ { jjtn001.setTerm(t.image); jjtn001.setNotFlag(notFlag); jjtn001.setType(type); }/*@bgen(jjtree)*/ - } finally { - if (jjtc001) { - jjtree.closeNodeScope(jjtn001, true); - } - } -/*@egen*/ - - // Otherwise, we are dealing with a Sub-Expression, so start back from the top. 
- | ( < LPAREN > ( OrExpression() ) < RPAREN > ) - { - // pass on the notFlag state to the sub-expression - // note: the sub-expression might be a term (eg, "a" is a term in "!(!a)") - { - if (notFlag) { - Node n = jjtree.peekNode(); - if (n instanceof ASTExpression) { - boolean v = ((ASTExpression)n).isNotFlag(); - ((ASTExpression)n).setNotFlag(v ^ notFlag); - } - if (n instanceof ASTTerm) { boolean v = ((ASTTerm)n).isNotFlag(); ((ASTTerm)n).setNotFlag(v ^ notFlag); } - } - } - } - ) - -} \ No newline at end of file diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt deleted file mode 100644 index a215b6843..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt +++ /dev/null @@ -1,90 +0,0 @@ -/** - * A simple boolean query language that supports a number of features for free text querying. These - * features include: "and"s, "or"s, sub-expressions using parens "( )", negation, and prefix and postfix wildcard queries. - * - * Most of the classes in this package are auto-generated from QueryParser.jjt using javacc and jjtree. ASTExpression and - * ASTTerm are slightly modified versions of the auto-generated files. - * - * I highly recommend the "JavaCC Eclipse Plug-in". - */ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - options { - MULTI = true; JDK_VERSION = "1.5"; IGNORE_CASE = true; NODE_DEFAULT_VOID=true; - static = false; -// DEBUG_PARSER = true; -// DEBUG_LOOKAHEAD = true; } PARSER_BEGIN(QueryParser) package mvm.rya.indexing.accumulo.freetext.query; - import java.io.StringReader; -public class QueryParser { - // Helper method to parse Strings (instead of streams) - public static SimpleNode parse(String query) throws ParseException, TokenMgrError { QueryParser parser = new QueryParser(new StringReader(query)); return parser.Start(); } } PARSER_END(QueryParser) SKIP : /* Ignore Whitespace */ { " " | "\t" | "\n" | "\r" -} -TOKEN : { - <AND: "and"> -| <OR: "or"> -| <NOT: ("not" | "!")> -| <LPAREN: "("> -| <RPAREN: ")"> -| <QUOTED: "\"" (<_QUOTED_CHAR>)* "\""> -| <#_QUOTED_CHAR: ~[ "\""] > -| <TERM: <_TERM_CHAR> (<_TERM_CHAR>)* > -| <PREFIXTERM: "*" <TERM> > -| <WILDTERM: <TERM> "*" > -| <#_TERM_CHAR: ~[ " ", "\t", "\n", "\r", "*", "(", ")", "!"] > -} - SimpleNode Start() #SimpleNode: {} { - // "or"s have the lowest order of operations, so they will be highest on the tree. Start with them. 
OrExpression() < EOF > { return jjtThis; } } - -void OrExpression() #Expression(>1): -{ jjtThis.setType(ASTExpression.OR); } -{ - AndExpression() (< OR > AndExpression())* } - -void AndExpression() #Expression(>1): { jjtThis.setType(ASTExpression.AND); } -{ Term() ([< AND >] Term())* -} - void Term() : { Token t; boolean notFlag = false; String type = ""; } { - // Update the notFlag if a "not" is present - [ < NOT > { notFlag = true; } ] - - ( // Create a term, if a term is present - ( t = < TERM > { type = ASTTerm.TERM; } | t = < WILDTERM > { type = ASTTerm.WILDTERM; } - | t = < PREFIXTERM > { type = ASTTerm.PREFIXTERM; } - | t = < QUOTED > { type = ASTTerm.QUOTED; } - ) { jjtThis.setTerm(t.image); jjtThis.setNotFlag(notFlag); jjtThis.setType(type); } #Term() - - // Otherwise, we are dealing with a Sub-Expression, so start back from the top. - | ( < LPAREN > ( OrExpression() ) < RPAREN > ) - { - // pass on the notFlag state to the sub-expression - // note: the sub-expression might be a term (eg, "a" is a term in "!(!a)") - { - if (notFlag) { - Node n = jjtree.peekNode(); - if (n instanceof ASTExpression) { - boolean v = ((ASTExpression)n).isNotFlag(); - ((ASTExpression)n).setNotFlag(v ^ notFlag); - } - if (n instanceof ASTTerm) { boolean v = ((ASTTerm)n).isNotFlag(); ((ASTTerm)n).setNotFlag(v ^ notFlag); } - } - } - } - ) - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java deleted file mode 100644 index caac3a932..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java +++ /dev/null @@ -1,960 +0,0 @@ -/* Generated By:JJTree&JavaCC: Do not edit this line. QueryParserConstants.java */ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */
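The deleted grammar above defines the whole query language: OrExpression sits at the root because "or" binds loosest, AndExpression is one or more Terms joined by an optional "and" (juxtaposition is an implicit "and"), and Term is either a token or a parenthesized sub-expression whose negation is folded in via the XOR in Term(). The sketch below shows how the generated parser was typically driven. It is a minimal illustration only: QueryParserDemo is a hypothetical class, the query string is made up, and getTerm()/getType() are assumed getters mirroring the setTerm()/setType() setters visible in the grammar; QueryParser.parse, isNotFlag(), and the jjtree Node accessors (jjtGetNumChildren/jjtGetChild) are taken from the diff and the standard jjtree-generated API.

```java
package mvm.rya.indexing.accumulo.freetext.query;

public class QueryParserDemo { // hypothetical demo class, not part of the patch
    public static void main(String[] args) throws ParseException {
        // "or" binds loosest, juxtaposition is an implicit "and",
        // and "!" negates a term or a parenthesized sub-expression.
        SimpleNode root = QueryParser.parse("alpha or (bravo and !\"a phrase\")");
        dump(root, 0);
    }

    // Walk the AST using the standard jjtree Node accessors.
    private static void dump(Node n, int depth) {
        StringBuilder indent = new StringBuilder();
        for (int i = 0; i < depth; i++) {
            indent.append("  ");
        }
        if (n instanceof ASTTerm) {
            ASTTerm t = (ASTTerm) n;
            // getType()/getTerm() are assumed getters for the setters used in Term()
            System.out.println(indent + (t.isNotFlag() ? "NOT " : "") + t.getType() + " " + t.getTerm());
        } else if (n instanceof ASTExpression) {
            ASTExpression e = (ASTExpression) n;
            System.out.println(indent + (e.isNotFlag() ? "NOT " : "") + e.getType());
        } else {
            System.out.println(indent + n);
        }
        for (int i = 0; i < n.jjtGetNumChildren(); i++) {
            dump(n.jjtGetChild(i), depth + 1);
        }
    }
}
```

Note that negation never creates a node of its own: Term() either stamps notFlag onto the ASTTerm it builds, or XORs it into the node the sub-expression left on the jjtree stack, which is why a doubly negated input such as !(!a) parses back to a plain positive term.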
- - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/**
- * Token literal values and constants.
- * Generated by org.javacc.parser.OtherFilesGen#start()
- */
-public interface QueryParserConstants {
-
-  /** End of File. */
-  int EOF = 0;
-  /** RegularExpression Id. */
-  int AND = 5;
-  /** RegularExpression Id. */
-  int OR = 6;
-  /** RegularExpression Id. */
-  int NOT = 7;
-  /** RegularExpression Id. */
-  int LPAREN = 8;
-  /** RegularExpression Id. */
-  int RPAREN = 9;
-  /** RegularExpression Id. */
-  int QUOTED = 10;
-  /** RegularExpression Id. */
-  int _QUOTED_CHAR = 11;
-  /** RegularExpression Id. */
-  int TERM = 12;
-  /** RegularExpression Id. */
-  int PREFIXTERM = 13;
-  /** RegularExpression Id. */
-  int WILDTERM = 14;
-  /** RegularExpression Id. */
-  int _TERM_CHAR = 15;
-
-  /** Lexical state. */
-  int DEFAULT = 0;
-
-  /** Literal token values. */
-  String[] tokenImage = {
-    "<EOF>",
-    "\" \"",
-    "\"\\t\"",
-    "\"\\n\"",
-    "\"\\r\"",
-    "<AND>",
-    "<OR>",
-    "<NOT>",
-    "\"(\"",
-    "\")\"",
-    "<QUOTED>",
-    "<_QUOTED_CHAR>",
-    "<TERM>",
-    "<PREFIXTERM>",
-    "<WILDTERM>",
-    "<_TERM_CHAR>",
-  };
-
-}
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
deleted file mode 100644
index 5c92c7b99..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
+++ /dev/null
@@ -1,1389 +0,0 @@
-/* Generated By:JJTree&JavaCC: Do not edit this line. QueryParserTokenManager.java */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.io.StringReader;
-
-/** Token Manager. */
-public class QueryParserTokenManager implements QueryParserConstants
-{
-
-  /** Debug output. */
-  public java.io.PrintStream debugStream = System.out;
-  /** Set debug output. */
-  public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
-private final int jjStopStringLiteralDfa_0(int pos, long active0)
-{
-   switch (pos)
-   {
-      default :
-         return -1;
-   }
-}
-private final int jjStartNfa_0(int pos, long active0)
-{
-   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
-}
-private int jjStopAtPos(int pos, int kind)
-{
-   jjmatchedKind = kind;
-   jjmatchedPos = pos;
-   return pos + 1;
-}
-private int jjMoveStringLiteralDfa0_0()
-{
-   switch(curChar)
-   {
-      case 40:
-         return jjStopAtPos(0, 8);
-      case 41:
-         return jjStopAtPos(0, 9);
-      default :
-         return jjMoveNfa_0(2, 0);
-   }
-}
-static final long[] jjbitVec0 = {
-   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
-};
-private int jjMoveNfa_0(int startState, int curPos)
-{
-   int startsAt = 0;
-   jjnewStateCnt = 24;
-   int i = 1;
-   jjstateSet[0] = startState;
-   int kind = 0x7fffffff;
-   for (;;)
-   {
-      if (++jjround == 0x7fffffff)
-         ReInitRounds();
-      if (curChar < 64)
-      {
-         long l = 1L << curChar;
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 2:
-                  if ((0xfffff8fcffffd9ffL & l) != 0L)
-                  {
-                     if (kind > 12)
-                        kind = 12;
-                     jjCheckNAddStates(0, 2);
-                  }
-                  else if (curChar == 42)
-                     jjCheckNAdd(19);
-                  else if (curChar == 33)
-                  {
-                     if (kind > 7)
-                        kind = 7;
-                  }
-                  if (curChar == 34)
-                     jjCheckNAddTwoStates(16, 17);
-                  else if (curChar == 38)
-                  {
-                     if (kind > 5)
-                        kind = 5;
-                  }
-                  if (curChar == 38)
-                     jjstateSet[jjnewStateCnt++] = 3;
-                  break;
-               case 3:
-                  if (curChar == 38 && kind > 5)
-                     kind = 5;
-                  break;
-               case 4:
-                  if (curChar == 38)
-                     jjstateSet[jjnewStateCnt++] = 3;
-                  break;
-               case 5:
-                  if (curChar == 38 && kind > 5)
-                     kind = 5;
-                  break;
-               case 14:
-                  if (curChar == 33 && kind > 7)
-                     kind = 7;
-                  break;
-               case 15:
-                  if (curChar == 34)
-                     jjCheckNAddTwoStates(16, 17);
-                  break;
-               case 16:
-                  if ((0xfffffffbffffffffL & l) != 0L)
-                     jjCheckNAddTwoStates(16, 17);
-                  break;
-               case 17:
-                  if (curChar == 34 && kind > 10)
-                     kind = 10;
-                  break;
-               case 18:
-                  if (curChar == 42)
-                     jjCheckNAdd(19);
-                  break;
-               case 19:
-                  if ((0xfffff8fcffffd9ffL & l) == 0L)
-                     break;
-                  if (kind > 13)
-                     kind = 13;
-                  jjCheckNAdd(19);
-                  break;
-               case 20:
-                  if ((0xfffff8fcffffd9ffL & l) == 0L)
-                     break;
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAddStates(0, 2);
-                  break;
-               case 21:
-                  if ((0xfffff8fcffffd9ffL & l) == 0L)
-                     break;
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAdd(21);
-                  break;
-               case 22:
-                  if ((0xfffff8fcffffd9ffL & l) != 0L)
-                     jjCheckNAddTwoStates(22, 23);
-                  break;
-               case 23:
-                  if (curChar == 42 && kind > 14)
-                     kind = 14;
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      else if (curChar < 128)
-      {
-         long l = 1L << (curChar & 077);
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 2:
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAddStates(0, 2);
-                  if ((0x400000004000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 12;
-                  else if ((0x800000008000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  else if ((0x200000002L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 1;
-                  else if (curChar == 124)
-                  {
-                     if (kind > 6)
-                        kind = 6;
-                  }
-                  if (curChar == 124)
-                     jjstateSet[jjnewStateCnt++] = 8;
-                  break;
-               case 0:
-                  if ((0x1000000010L & l) != 0L && kind > 5)
-                     kind = 5;
-                  break;
-               case 1:
-                  if ((0x400000004000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 0;
-                  break;
-               case 6:
-                  if ((0x4000000040000L & l) != 0L && kind > 6)
-                     kind = 6;
-                  break;
-               case 7:
-                  if ((0x800000008000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  break;
-               case 8:
-                  if (curChar == 124 && kind > 6)
-                     kind = 6;
-                  break;
-               case 9:
-                  if (curChar == 124)
-                     jjstateSet[jjnewStateCnt++] = 8;
-                  break;
-               case 10:
-                  if (curChar == 124 && kind > 6)
-                     kind = 6;
-                  break;
-               case 11:
-                  if ((0x10000000100000L & l) != 0L && kind > 7)
-                     kind = 7;
-                  break;
-               case 12:
-                  if ((0x800000008000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 11;
-                  break;
-               case 13:
-                  if ((0x400000004000L & l) != 0L)
-                     jjstateSet[jjnewStateCnt++] = 12;
-                  break;
-               case 16:
-                  jjAddStates(3, 4);
-                  break;
-               case 19:
-                  if (kind > 13)
-                     kind = 13;
-                  jjstateSet[jjnewStateCnt++] = 19;
-                  break;
-               case 20:
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAddStates(0, 2);
-                  break;
-               case 21:
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAdd(21);
-                  break;
-               case 22:
-                  jjCheckNAddTwoStates(22, 23);
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      else
-      {
-         int i2 = (curChar & 0xff) >> 6;
-         long l2 = 1L << (curChar & 077);
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 2:
-                  if ((jjbitVec0[i2] & l2) == 0L)
-                     break;
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAddStates(0, 2);
-                  break;
-               case 16:
-                  if ((jjbitVec0[i2] & l2) != 0L)
-                     jjAddStates(3, 4);
-                  break;
-               case 19:
-                  if ((jjbitVec0[i2] & l2) == 0L)
-                     break;
-                  if (kind > 13)
-                     kind = 13;
-                  jjstateSet[jjnewStateCnt++] = 19;
-                  break;
-               case 21:
-                  if ((jjbitVec0[i2] & l2) == 0L)
-                     break;
-                  if (kind > 12)
-                     kind = 12;
-                  jjCheckNAdd(21);
-                  break;
-               case 22:
-                  if ((jjbitVec0[i2] & l2) != 0L)
-                     jjCheckNAddTwoStates(22, 23);
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      if (kind != 0x7fffffff)
-      {
-         jjmatchedKind = kind;
-         jjmatchedPos = curPos;
-         kind = 0x7fffffff;
-      }
-      ++curPos;
-      if ((i = jjnewStateCnt) == (startsAt = 24 - (jjnewStateCnt = startsAt)))
-         return curPos;
-      try { curChar = input_stream.readChar(); }
-      catch(java.io.IOException e) { return curPos; }
-   }
-}
-static final int[] jjnextStates = {
-   21, 22, 23, 16, 17,
-};
-
-/** Token literal values. */
-public static final String[] jjstrLiteralImages = {
-"", null, null, null, null, null, null, null, "\50", "\51", null, null, null,
-null, null, null, };
-
-/** Lexer state names. */
-public static final String[] lexStateNames = {
-   "DEFAULT",
-};
-static final long[] jjtoToken = {
-   0x77e1L,
-};
-static final long[] jjtoSkip = {
-   0x1eL,
-};
-protected SimpleCharStream input_stream;
-private final int[] jjrounds = new int[24];
-private final int[] jjstateSet = new int[48];
-protected char curChar;
-/** Constructor. */
-public QueryParserTokenManager(SimpleCharStream stream){
-   if (SimpleCharStream.staticFlag)
-      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
-   input_stream = stream;
-}
-
-/** Constructor. */
-public QueryParserTokenManager(SimpleCharStream stream, int lexState){
-   this(stream);
-   SwitchTo(lexState);
-}
-
-/** Reinitialise parser. */
*/ -public void ReInit(SimpleCharStream stream) -{ - jjmatchedPos = jjnewStateCnt = 0; - curLexState = defaultLexState; - input_stream = stream; - ReInitRounds(); -} -private void ReInitRounds() -{ - int i; - jjround = 0x80000001; - for (i = 24; i-- > 0;) - jjrounds[i] = 0x80000000; -} - -/** Reinitialise parser. */ -public void ReInit(SimpleCharStream stream, int lexState) -{ - ReInit(stream); - SwitchTo(lexState); -} - -/** Switch to specified lex state. */ -public void SwitchTo(int lexState) -{ - if (lexState >= 1 || lexState < 0) - throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); - else - curLexState = lexState; -} - -protected Token jjFillToken() -{ - final Token t; - final String curTokenImage; - final int beginLine; - final int endLine; - final int beginColumn; - final int endColumn; - String im = jjstrLiteralImages[jjmatchedKind]; - curTokenImage = (im == null) ? input_stream.GetImage() : im; - beginLine = input_stream.getBeginLine(); - beginColumn = input_stream.getBeginColumn(); - endLine = input_stream.getEndLine(); - endColumn = input_stream.getEndColumn(); - t = Token.newToken(jjmatchedKind, curTokenImage); - - t.beginLine = beginLine; - t.endLine = endLine; - t.beginColumn = beginColumn; - t.endColumn = endColumn; - - return t; -} - -int curLexState = 0; -int defaultLexState = 0; -int jjnewStateCnt; -int jjround; -int jjmatchedPos; -int jjmatchedKind; - -/** Get the next Token. */ -public Token getNextToken() -{ - Token matchedToken; - int curPos = 0; - - EOFLoop : - for (;;) - { - try - { - curChar = input_stream.BeginToken(); - } - catch(java.io.IOException e) - { - jjmatchedKind = 0; - matchedToken = jjFillToken(); - return matchedToken; - } - - try { input_stream.backup(0); - while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) - curChar = input_stream.BeginToken(); - } - catch (java.io.IOException e1) { continue EOFLoop; } - jjmatchedKind = 0x7fffffff; - jjmatchedPos = 0; - curPos = jjMoveStringLiteralDfa0_0(); - if (jjmatchedKind != 0x7fffffff) - { - if (jjmatchedPos + 1 < curPos) - input_stream.backup(curPos - jjmatchedPos - 1); - if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) - { - matchedToken = jjFillToken(); - return matchedToken; - } - else - { - continue EOFLoop; - } - } - int error_line = input_stream.getEndLine(); - int error_column = input_stream.getEndColumn(); - String error_after = null; - boolean EOFSeen = false; - try { input_stream.readChar(); input_stream.backup(1); } - catch (java.io.IOException e1) { - EOFSeen = true; - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - if (curChar == '\n' || curChar == '\r') { - error_line++; - error_column = 0; - } - else - error_column++; - } - if (!EOFSeen) { - input_stream.backup(1); - error_after = curPos <= 1 ? 
"" : input_stream.GetImage(); - } - throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); - } -} - -private void jjCheckNAdd(int state) -{ - if (jjrounds[state] != jjround) - { - jjstateSet[jjnewStateCnt++] = state; - jjrounds[state] = jjround; - } -} -private void jjAddStates(int start, int end) -{ - do { - jjstateSet[jjnewStateCnt++] = jjnextStates[start]; - } while (start++ != end); -} -private void jjCheckNAddTwoStates(int state1, int state2) -{ - jjCheckNAdd(state1); - jjCheckNAdd(state2); -} - -private void jjCheckNAddStates(int start, int end) -{ - do { - jjCheckNAdd(jjnextStates[start]); - } while (start++ != end); -} - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java deleted file mode 100644 index fcf0dc7aa..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java +++ /dev/null @@ -1,920 +0,0 @@ -/* Generated By:JavaCC: Do not edit this line. QueryParserTreeConstants.java Version 5.0 */ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -public interface QueryParserTreeConstants -{ - public int JJTSIMPLENODE = 0; - public int JJTEXPRESSION = 1; - public int JJTVOID = 2; - public int JJTTERM = 3; - - - public String[] jjtNodeName = { - "SimpleNode", - "Expression", - "void", - "Term", - }; -} -/* JavaCC - OriginalChecksum=7db3f19ae343b33492ca4cbb4cb236be (do not edit this line) */ diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java deleted file mode 100644 index 3f49d1fbd..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java +++ /dev/null @@ -1,1372 +0,0 @@ -/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */ -/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * An implementation of interface CharStream, where the stream is assumed to - * contain only ASCII characters (without unicode processing). - */ - -public class SimpleCharStream -{ -/** Whether parser is static. */ - public static final boolean staticFlag = false; - int bufsize; - int available; - int tokenBegin; -/** Position in buffer. */ - public int bufpos = -1; - protected int bufline[]; - protected int bufcolumn[]; - - protected int column = 0; - protected int line = 1; - - protected boolean prevCharIsCR = false; - protected boolean prevCharIsLF = false; - - protected java.io.Reader inputStream; - - protected char[] buffer; - protected int maxNextCharInd = 0; - protected int inBuf = 0; - protected int tabSize = 8; - - protected void setTabSize(int i) { tabSize = i; } - protected int getTabSize(int i) { return tabSize; } - - - protected void ExpandBuff(boolean wrapAround) - { - char[] newbuffer = new char[bufsize + 2048]; - int newbufline[] = new int[bufsize + 2048]; - int newbufcolumn[] = new int[bufsize + 2048]; - - try - { - if (wrapAround) - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos += (bufsize - tokenBegin)); - } - else - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - buffer = newbuffer; - - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - bufline = newbufline; - - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - bufcolumn = newbufcolumn; - - maxNextCharInd = (bufpos -= tokenBegin); - } - } - catch (Throwable t) - { - throw new Error(t.getMessage()); - } - - - bufsize += 2048; - available = bufsize; - tokenBegin = 0; - } - - protected void FillBuff() throws java.io.IOException - { - if (maxNextCharInd == available) - { - if (available == bufsize) - { - if (tokenBegin > 2048) - { - bufpos = maxNextCharInd = 0; - available = tokenBegin; - } - else if (tokenBegin < 0) - bufpos = maxNextCharInd = 0; - else - ExpandBuff(false); - } - else if (available > tokenBegin) - available = bufsize; - else if ((tokenBegin - available) < 2048) - ExpandBuff(true); - else - available = tokenBegin; - } - - int i; - try { - if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1) - { - inputStream.close(); - throw new java.io.IOException(); - } - else - maxNextCharInd += i; - return; - } - catch(java.io.IOException e) { - --bufpos; - backup(0); - if (tokenBegin == -1) - tokenBegin = bufpos; - throw e; - } - } - -/** Start. 
*/ - public char BeginToken() throws java.io.IOException - { - tokenBegin = -1; - char c = readChar(); - tokenBegin = bufpos; - - return c; - } - - protected void UpdateLineColumn(char c) - { - column++; - - if (prevCharIsLF) - { - prevCharIsLF = false; - line += (column = 1); - } - else if (prevCharIsCR) - { - prevCharIsCR = false; - if (c == '\n') - { - prevCharIsLF = true; - } - else - line += (column = 1); - } - - switch (c) - { - case '\r' : - prevCharIsCR = true; - break; - case '\n' : - prevCharIsLF = true; - break; - case '\t' : - column--; - column += (tabSize - (column % tabSize)); - break; - default : - break; - } - - bufline[bufpos] = line; - bufcolumn[bufpos] = column; - } - -/** Read a character. */ - public char readChar() throws java.io.IOException - { - if (inBuf > 0) - { - --inBuf; - - if (++bufpos == bufsize) - bufpos = 0; - - return buffer[bufpos]; - } - - if (++bufpos >= maxNextCharInd) - FillBuff(); - - char c = buffer[bufpos]; - - UpdateLineColumn(c); - return c; - } - - @Deprecated - /** - * @deprecated - * @see #getEndColumn - */ - - public int getColumn() { - return bufcolumn[bufpos]; - } - - @Deprecated - /** - * @deprecated - * @see #getEndLine - */ - - public int getLine() { - return bufline[bufpos]; - } - - /** Get token end column number. */ - public int getEndColumn() { - return bufcolumn[bufpos]; - } - - /** Get token end line number. */ - public int getEndLine() { - return bufline[bufpos]; - } - - /** Get token beginning column number. */ - public int getBeginColumn() { - return bufcolumn[tokenBegin]; - } - - /** Get token beginning line number. */ - public int getBeginLine() { - return bufline[tokenBegin]; - } - -/** Backup a number of characters. */ - public void backup(int amount) { - - inBuf += amount; - if ((bufpos -= amount) < 0) - bufpos += bufsize; - } - - /** Constructor. */ - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - - /** Constructor. */ - public SimpleCharStream(java.io.Reader dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - /** Constructor. */ - public SimpleCharStream(java.io.Reader dstream) - { - this(dstream, 1, 1, 4096); - } - - /** Reinitialise. */ - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn, int buffersize) - { - inputStream = dstream; - line = startline; - column = startcolumn - 1; - - if (buffer == null || buffersize != buffer.length) - { - available = bufsize = buffersize; - buffer = new char[buffersize]; - bufline = new int[buffersize]; - bufcolumn = new int[buffersize]; - } - prevCharIsLF = prevCharIsCR = false; - tokenBegin = inBuf = maxNextCharInd = 0; - bufpos = -1; - } - - /** Reinitialise. */ - public void ReInit(java.io.Reader dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - - /** Reinitialise. */ - public void ReInit(java.io.Reader dstream) - { - ReInit(dstream, 1, 1, 4096); - } - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - this(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, startline, startcolumn, 4096); - } - - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream, int startline, - int startcolumn) - { - this(dstream, startline, startcolumn, 4096); - } - - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - this(dstream, encoding, 1, 1, 4096); - } - - /** Constructor. */ - public SimpleCharStream(java.io.InputStream dstream) - { - this(dstream, 1, 1, 4096); - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException - { - ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn, int buffersize) - { - ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, 1, 1, 4096); - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream) - { - ReInit(dstream, 1, 1, 4096); - } - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream, String encoding, int startline, - int startcolumn) throws java.io.UnsupportedEncodingException - { - ReInit(dstream, encoding, startline, startcolumn, 4096); - } - /** Reinitialise. */ - public void ReInit(java.io.InputStream dstream, int startline, - int startcolumn) - { - ReInit(dstream, startline, startcolumn, 4096); - } - /** Get token literal value. */ - public String GetImage() - { - if (bufpos >= tokenBegin) - return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); - else - return new String(buffer, tokenBegin, bufsize - tokenBegin) + - new String(buffer, 0, bufpos + 1); - } - - /** Get the suffix. */ - public char[] GetSuffix(int len) - { - char[] ret = new char[len]; - - if ((bufpos + 1) >= len) - System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); - else - { - System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, - len - bufpos - 1); - System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); - } - - return ret; - } - - /** Reset buffer when finished. */ - public void Done() - { - buffer = null; - bufline = null; - bufcolumn = null; - } - - /** - * Method to adjust line and column numbers for the start of a token. 
- */ - public void adjustBeginLineColumn(int newLine, int newCol) - { - int start = tokenBegin; - int len; - - if (bufpos >= tokenBegin) - { - len = bufpos - tokenBegin + inBuf + 1; - } - else - { - len = bufsize - tokenBegin + bufpos + 1 + inBuf; - } - - int i = 0, j = 0, k = 0; - int nextColDiff = 0, columnDiff = 0; - - while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) - { - bufline[j] = newLine; - nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; - bufcolumn[j] = newCol + columnDiff; - columnDiff = nextColDiff; - i++; - } - - if (i < len) - { - bufline[j] = newLine++; - bufcolumn[j] = newCol + columnDiff; - - while (i++ < len) - { - if (bufline[j = start % bufsize] != bufline[++start % bufsize]) - bufline[j] = newLine++; - else - bufline[j] = newLine; - } - } - - line = bufline[j]; - column = bufcolumn[j]; - } - -} -/* JavaCC - OriginalChecksum=003f6ea93d012999f2e1302d1daab102 (do not edit this line) */ diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java deleted file mode 100644 index d372615f9..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java +++ /dev/null @@ -1,980 +0,0 @@ -/* Generated By:JJTree: Do not edit this line. SimpleNode.java Version 4.3 */ -/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
-
-public
-class SimpleNode implements Node {
-
-  protected Node parent;
-  protected Node[] children;
-  protected int id;
-  protected Object value;
-  protected QueryParser parser;
-
-  public SimpleNode(int i) {
-    id = i;
-  }
-
-  public SimpleNode(QueryParser p, int i) {
-    this(i);
-    parser = p;
-  }
-
-  public void jjtOpen() {
-  }
-
-  public void jjtClose() {
-  }
-
-  public void jjtSetParent(Node n) { parent = n; }
-  public Node jjtGetParent() { return parent; }
-
-  public void jjtAddChild(Node n, int i) {
-    if (children == null) {
-      children = new Node[i + 1];
-    } else if (i >= children.length) {
-      Node c[] = new Node[i + 1];
-      System.arraycopy(children, 0, c, 0, children.length);
-      children = c;
-    }
-    children[i] = n;
-  }
-
-  public Node jjtGetChild(int i) {
-    return children[i];
-  }
-
-  public int jjtGetNumChildren() {
-    return (children == null) ? 0 : children.length;
-  }
-
-  public void jjtSetValue(Object value) { this.value = value; }
-  public Object jjtGetValue() { return value; }
-
-  /* You can override these two methods in subclasses of SimpleNode to
-     customize the way the node appears when the tree is dumped.  If
-     your output uses more than one line you should override
-     toString(String), otherwise overriding toString() is probably all
-     you need to do.
  */
-
-  public String toString() { return QueryParserTreeConstants.jjtNodeName[id]; }
-  public String toString(String prefix) { return prefix + toString(); }
-
-  /* Override this method if you want to customize how the node dumps
-     out its children. */
-
-  public void dump(String prefix) {
-    System.out.println(toString(prefix));
-    if (children != null) {
-      for (int i = 0; i < children.length; ++i) {
-        SimpleNode n = (SimpleNode)children[i];
-        if (n != null) {
-          n.dump(prefix + " ");
-        }
-      }
-    }
-  }
-}
-
-/* JavaCC - OriginalChecksum=d65b3d27c1d9231908f90be143472875 (do not edit this line) */
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
deleted file mode 100644
index 95292d300..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
+++ /dev/null
@@ -1,1032 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. Token.java Version 5.0 */
-/* JavaCCOptions:TOKEN_EXTENDS=,KEEP_LINE_COL=null,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Describes the input token stream.
- */
-
-public class Token implements java.io.Serializable {
-
-  /**
-   * The version identifier for this Serializable class.
-   * Increment only if the serialized form of the
-   * class changes.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * An integer that describes the kind of this token. This numbering
-   * system is determined by JavaCCParser, and a table of these numbers is
-   * stored in the file ...Constants.java.
-   */
-  public int kind;
-
-  /** The line number of the first character of this Token. */
-  public int beginLine;
-  /** The column number of the first character of this Token. */
-  public int beginColumn;
-  /** The line number of the last character of this Token. */
-  public int endLine;
-  /** The column number of the last character of this Token. */
-  public int endColumn;
-
-  /**
-   * The string image of the token.
-   */
-  public String image;
-
-  /**
-   * A reference to the next regular (non-special) token from the input
-   * stream. If this is the last token from the input stream, or if the
-   * token manager has not read tokens beyond this one, this field is
-   * set to null. This is true only if this token is also a regular
-   * token. Otherwise, see below for a description of the contents of
-   * this field.
-   */
-  public Token next;
-
-  /**
-   * This field is used to access special tokens that occur prior to this
-   * token, but after the immediately preceding regular (non-special) token.
-   * If there are no such special tokens, this field is set to null.
-   * When there are more than one such special token, this field refers
-   * to the last of these special tokens, which in turn refers to the next
-   * previous special token through its specialToken field, and so on
-   * until the first special token (whose specialToken field is null).
-   * The next fields of special tokens refer to other special tokens that
-   * immediately follow it (without an intervening regular token). If there
-   * is no such token, this field is null.
-   */
-  public Token specialToken;
-
-  /**
-   * An optional attribute value of the Token.
-   * Tokens which are not used as syntactic sugar will often contain
-   * meaningful values that will be used later on by the compiler or
-   * interpreter.
This attribute value is often different from the image.
-   * Any subclass of Token that actually wants to return a non-null value can
-   * override this method as appropriate.
-   */
-  public Object getValue() {
-    return null;
-  }
-
-  /**
-   * No-argument constructor
-   */
-  public Token() {}
-
-  /**
-   * Constructs a new token for the specified Image.
-   */
-  public Token(int kind)
-  {
-    this(kind, null);
-  }
-
-  /**
-   * Constructs a new token for the specified Image and Kind.
-   */
-  public Token(int kind, String image)
-  {
-    this.kind = kind;
-    this.image = image;
-  }
-
-  /**
-   * Returns the image.
-   */
-  public String toString()
-  {
-    return image;
-  }
-
-  /**
-   * Returns a new Token object, by default. However, if you want, you
-   * can create and return subclass objects based on the value of ofKind.
-   * Simply add the cases to the switch for all those special cases.
-   * For example, if you have a subclass of Token called IDToken that
-   * you want to create if ofKind is ID, simply add something like :
-   *
-   *    case MyParserConstants.ID : return new IDToken(ofKind, image);
-   *
-   * to the following switch statement. Then you can cast matchedToken
-   * variable to the appropriate type and use it in your lexical actions.
-   */
-  public static Token newToken(int ofKind, String image)
-  {
-    switch(ofKind)
-    {
-      default : return new Token(ofKind, image);
-    }
-  }
-
-  public static Token newToken(int ofKind)
-  {
-    return newToken(ofKind, null);
-  }
-
-}
-/* JavaCC - OriginalChecksum=6e0a6d0b8d0fef396f67c3e7b1b29b5c (do not edit this line) */
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
deleted file mode 100644
index ab3b0f156..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
+++ /dev/null
@@ -1,1048 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
-/* JavaCCOptions: */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
deleted file mode 100644
index ab3b0f156..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
+++ /dev/null
@@ -1,1048 +0,0 @@
-/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
-/* JavaCCOptions: */
-package mvm.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/** Token Manager Error. */
-public class TokenMgrError extends Error
-{
-
-  /**
-   * The version identifier for this Serializable class.
-   * Increment only if the serialized form of the
-   * class changes.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /*
-   * Ordinals for various reasons why an Error of this type can be thrown.
-   */
-
-  /**
-   * Lexical error occurred.
-   */
-  static final int LEXICAL_ERROR = 0;
-
-  /**
-   * An attempt was made to create a second instance of a static token manager.
-   */
-  static final int STATIC_LEXER_ERROR = 1;
-
-  /**
-   * Tried to change to an invalid lexical state.
-   */
-  static final int INVALID_LEXICAL_STATE = 2;
-
-  /**
-   * Detected (and bailed out of) an infinite loop in the token manager.
-   */
-  static final int LOOP_DETECTED = 3;
-
-  /**
-   * Indicates the reason why the exception is thrown. It will have
-   * one of the above 4 values.
-   */
-  int errorCode;
-
-  /**
-   * Replaces unprintable characters by their escaped (or Unicode escaped)
-   * equivalents in the given string.
-   */
-  protected static final String addEscapes(String str) {
-    StringBuffer retval = new StringBuffer();
-    char ch;
-    for (int i = 0; i < str.length(); i++) {
-      switch (str.charAt(i))
-      {
-        case 0 :
-          continue;
-        case '\b':
-          retval.append("\\b");
-          continue;
-        case '\t':
-          retval.append("\\t");
-          continue;
-        case '\n':
-          retval.append("\\n");
-          continue;
-        case '\f':
-          retval.append("\\f");
-          continue;
-        case '\r':
-          retval.append("\\r");
-          continue;
-        case '\"':
-          retval.append("\\\"");
-          continue;
-        case '\'':
-          retval.append("\\\'");
-          continue;
-        case '\\':
-          retval.append("\\\\");
-          continue;
-        default:
-          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-            String s = "0000" + Integer.toString(ch, 16);
-            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-          } else {
-            retval.append(ch);
-          }
-          continue;
-      }
-    }
-    return retval.toString();
-  }
-
-  /**
-   * Returns a detailed message for the Error when it is thrown by the
-   * token manager to indicate a lexical error.
-   * Parameters :
-   *    EOFSeen     : indicates if EOF caused the lexical error
-   *    curLexState : lexical state in which this error occurred
-   *    errorLine   : line number when the error occurred
-   *    errorColumn : column number when the error occurred
-   *    errorAfter  : prefix that was seen before this error occurred
-   *    curChar     : the offending character
-   * Note: You can customize the lexical error message by modifying this method.
-   */
-  protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
-    return("Lexical error at line " +
-          errorLine + ", column " +
-          errorColumn + ". Encountered: " +
-          (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
-          "after : \"" + addEscapes(errorAfter) + "\"");
-  }
-
-  /**
-   * You can also modify the body of this method to customize your error messages.
-   * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-   * of end-users' concern, so you can return something like :
-   *
-   *     "Internal Error : Please file a bug report .... "
-   *
-   * from this method for such cases in the release version of your parser.
-   */
-  public String getMessage() {
-    return super.getMessage();
-  }
-
-  /*
-   * Constructors of various flavors follow.
-   */
-
-  /** No arg constructor. */
-  public TokenMgrError() {
-  }
-
-  /** Constructor with message and reason. */
-  public TokenMgrError(String message, int reason) {
-    super(message);
-    errorCode = reason;
-  }
-
-  /** Full Constructor. */
-  public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
-    this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
-  }
-}
-/* JavaCC - OriginalChecksum=290a4c5d743d0af7d70c6c0c9cd1d448 (do not edit this line) */
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
deleted file mode 100644
index 6cb01e119..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package mvm.rya.indexing.accumulo.geo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.URIImpl;
-
-/**
- * A set of URIs used in GeoSPARQL
- */
-public class GeoConstants {
-    public static final String NS_GEO = "http://www.opengis.net/ont/geosparql#";
-    public static final String NS_GEOF = "http://www.opengis.net/def/function/geosparql/";
-
-    public static final URI XMLSCHEMA_OGC_WKT = new URIImpl(NS_GEO + "wktLiteral");
-    public static final URI GEO_AS_WKT = new URIImpl(NS_GEO + "asWKT");
-
-    public static final URI GEO_SF_EQUALS = new URIImpl(NS_GEOF + "sfEquals");
-    public static final URI GEO_SF_DISJOINT = new URIImpl(NS_GEOF + "sfDisjoint");
-    public static final URI GEO_SF_INTERSECTS = new URIImpl(NS_GEOF + "sfIntersects");
-    public static final URI GEO_SF_TOUCHES = new URIImpl(NS_GEOF + "sfTouches");
-    public static final URI GEO_SF_CROSSES = new URIImpl(NS_GEOF + "sfCrosses");
-    public static final URI GEO_SF_WITHIN = new URIImpl(NS_GEOF + "sfWithin");
-    public static final URI GEO_SF_CONTAINS = new URIImpl(NS_GEOF + "sfContains");
-    public static final URI GEO_SF_OVERLAPS = new URIImpl(NS_GEOF + "sfOverlaps");
-}
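For context, these function URIs are what a GeoSPARQL query references in its FILTER clause. A hedged illustration; the query text is invented for this note and is not part of the patch:

// Editorial sketch: how the GeoConstants namespaces and sfWithin function
// URI appear in a GeoSPARQL query. The query text is invented.
public class GeoSparqlQuerySketch {
    public static void main(String[] args) {
        // geo: and geof: below are the NS_GEO and NS_GEOF namespaces declared
        // in GeoConstants; geof:sfWithin corresponds to GEO_SF_WITHIN.
        String query =
                "PREFIX geo: <http://www.opengis.net/ont/geosparql#>\n"
                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>\n"
                + "SELECT ?feature ?wkt WHERE {\n"
                + "  ?feature geo:asWKT ?wkt .\n"
                + "  FILTER(geof:sfWithin(?wkt,\n"
                + "      \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral))\n"
                + "}";
        System.out.println(query);
    }
}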
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
deleted file mode 100644
index 37acf8997..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ /dev/null
@@ -1,447 +0,0 @@
-package mvm.rya.indexing.accumulo.geo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.GeoIndexer;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.Md5Hash;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.MultiTableBatchWriter;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.geotools.data.DataStore;
-import org.geotools.data.DataStoreFinder;
-import org.geotools.data.FeatureSource;
-import org.geotools.data.FeatureStore;
-import org.geotools.data.Query;
-import org.geotools.factory.Hints;
-import org.geotools.feature.DefaultFeatureCollection;
-import org.geotools.feature.FeatureIterator;
-import org.geotools.feature.SchemaException;
-import org.geotools.feature.simple.SimpleFeatureBuilder;
-import org.geotools.filter.text.cql2.CQLException;
-import org.geotools.filter.text.ecql.ECQL;
-import org.locationtech.geomesa.accumulo.index.Constants;
-import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes;
-import org.opengis.feature.simple.SimpleFeature;
-import org.opengis.feature.simple.SimpleFeatureType;
-import org.opengis.filter.Filter;
-import org.openrdf.model.Literal;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.query.QueryEvaluationException;
-
-import com.google.common.base.Preconditions;
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.io.ParseException;
-import com.vividsolutions.jts.io.WKTReader;
-
-/**
- * A {@link GeoIndexer} wrapper around a GeoMesa {@link AccumuloDataStore}. This class configures and connects to the Datastore, creates the
- * RDF Feature Type, and interacts with the Datastore.
- * <p>
- * Specifically, this class creates an RDF Feature type and stores each RDF Statement as an RDF Feature in the datastore. Each feature
- * contains the standard set of GeoMesa attributes (Geometry, Start Date, and End Date). The GeoMesaGeoIndexer populates the Geometry
- * attribute by parsing the Well-Known Text contained in the RDF Statement's object literal value.
- * <p>
- * The RDF Feature contains four additional attributes for each component of the RDF Statement. These attributes are:
- * <p>
- * <table border="1">
- * <tr><th>Name</th><th>Symbol</th><th>Type</th></tr>
- * <tr><td>Subject Attribute</td><td>S</td><td>String</td></tr>
- * <tr><td>Predicate Attribute</td><td>P</td><td>String</td></tr>
- * <tr><td>Object Attribute</td><td>O</td><td>String</td></tr>
- * <tr><td>Context Attribute</td><td>C</td><td>String</td></tr>
- * </table>
- */
-public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoIndexer {
-
-    private static final Logger logger = Logger.getLogger(GeoMesaGeoIndexer.class);
-
-    private static final String FEATURE_NAME = "RDF";
-
-    private static final String SUBJECT_ATTRIBUTE = "S";
-    private static final String PREDICATE_ATTRIBUTE = "P";
-    private static final String OBJECT_ATTRIBUTE = "O";
-    private static final String CONTEXT_ATTRIBUTE = "C";
-
-    private Set<URI> validPredicates;
-    private Configuration conf;
-    private FeatureStore<SimpleFeatureType, SimpleFeature> featureStore;
-    private FeatureSource<SimpleFeatureType, SimpleFeature> featureSource;
-    private SimpleFeatureType featureType;
-    private boolean isInit = false;
-
-    // initialization occurs in setConf because the index is created using reflection
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-        if (!isInit) {
-            try {
-                init();
-                isInit = true;
-            } catch (IOException e) {
-                logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    @Override
-    public Configuration getConf() {
-        return this.conf;
-    }
-
-    private void init() throws IOException {
-        validPredicates = ConfigUtils.getGeoPredicates(conf);
-
-        DataStore dataStore = createDataStore(conf);
-
-        try {
-            featureType = getStatementFeatureType(dataStore);
-        } catch (IOException e) {
-            throw new IOException(e);
-        } catch (SchemaException e) {
-            throw new IOException(e);
-        }
-
-        featureSource = dataStore.getFeatureSource(featureType.getName());
-        if (!(featureSource instanceof FeatureStore)) {
-            throw new IllegalStateException("Could not retrieve feature store");
-        }
-        featureStore = (FeatureStore<SimpleFeatureType, SimpleFeature>) featureSource;
-    }
-
-    private static DataStore createDataStore(Configuration conf) throws IOException {
-        // get the configuration parameters
-        Instance instance = ConfigUtils.getInstance(conf);
-        boolean useMock = instance instanceof MockInstance;
-        String instanceId = instance.getInstanceName();
-        String zookeepers = instance.getZooKeepers();
-        String user = ConfigUtils.getUsername(conf);
-        String password = ConfigUtils.getPassword(conf);
-        String auths = ConfigUtils.getAuthorizations(conf).toString();
-        String tableName = ConfigUtils.getGeoTablename(conf);
-        int numPartitions = ConfigUtils.getGeoNumPartitions(conf);
-
-        String featureSchemaFormat = "%~#s%" + numPartitions + "#r%" + FEATURE_NAME
-                + "#cstr%0,3#gh%yyyyMMdd#d::%~#s%3,2#gh::%~#s%#id";
-        // build the map of parameters
-        Map<String, Serializable> params = new HashMap<String, Serializable>();
-        params.put("instanceId", instanceId);
-        params.put("zookeepers", zookeepers);
-        params.put("user", user);
-        params.put("password", password);
-        params.put("auths", auths);
-        params.put("tableName", tableName);
-        params.put("indexSchemaFormat", featureSchemaFormat);
-        params.put("useMock", Boolean.toString(useMock));
-
-        // fetch the data store from the finder
-        return DataStoreFinder.getDataStore(params);
-    }
-
-    private static SimpleFeatureType getStatementFeatureType(DataStore dataStore) throws IOException, SchemaException {
-        SimpleFeatureType featureType;
-
-        String[] datastoreFeatures = dataStore.getTypeNames();
-        if (Arrays.asList(datastoreFeatures).contains(FEATURE_NAME)) {
-            featureType = dataStore.getSchema(FEATURE_NAME);
-        } else {
-            String featureSchema = SUBJECT_ATTRIBUTE + ":String," //
-                    + PREDICATE_ATTRIBUTE + ":String," //
-                    + OBJECT_ATTRIBUTE + ":String," //
-                    + CONTEXT_ATTRIBUTE + ":String," //
-                    + Constants.SF_PROPERTY_GEOMETRY + ":Geometry:srid=4326";
-            featureType = SimpleFeatureTypes.createType(FEATURE_NAME, featureSchema);
-            dataStore.createSchema(featureType);
-        }
-        return featureType;
-    }
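As an editorial aside before the storage methods: the schema string assembled above expands to a fixed value that matches the javadoc table. A hedged sketch; the concrete GeoMesa geometry attribute name is an assumption, since Constants.SF_PROPERTY_GEOMETRY is resolved at compile time:

// Editorial sketch: the feature schema assembled by getStatementFeatureType()
// above, written out concretely. The geometry attribute name is an assumed
// value of Constants.SF_PROPERTY_GEOMETRY in this GeoMesa generation.
public class FeatureSchemaSketch {
    public static void main(String[] args) {
        String featureSchema = "S:String,"                               // subject
                + "P:String,"                                            // predicate
                + "O:String,"                                            // object
                + "C:String,"                                            // context
                + "geomesa_index_geometry:Geometry:srid=4326";           // assumed name
        System.out.println(featureSchema);
    }
}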
-
-    @Override
-    public void storeStatements(Collection<RyaStatement> ryaStatements) throws IOException {
-        // create a feature collection
-        DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
-
-        for (RyaStatement ryaStatement : ryaStatements) {
-
-            Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
-            // if the predicate list is empty, accept all predicates.
-            // Otherwise, make sure the predicate is on the "valid" list
-            boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
-
-            if (isValidPredicate && (statement.getObject() instanceof Literal)) {
-                try {
-                    SimpleFeature feature = createFeature(featureType, statement);
-                    featureCollection.add(feature);
-                } catch (ParseException e) {
-                    logger.warn("Error getting geo from statement: " + statement.toString(), e);
-                }
-            }
-        }
-
-        // write this feature collection to the store
-        if (!featureCollection.isEmpty()) {
-            featureStore.addFeatures(featureCollection);
-        }
-    }
-
-    @Override
-    public void storeStatement(RyaStatement statement) throws IOException {
-        storeStatements(Collections.singleton(statement));
-    }
-
-    private static SimpleFeature createFeature(SimpleFeatureType featureType, Statement statement) throws ParseException {
-        String subject = StatementSerializer.writeSubject(statement);
-        String predicate = StatementSerializer.writePredicate(statement);
-        String object = StatementSerializer.writeObject(statement);
-        String context = StatementSerializer.writeContext(statement);
-
-        // create the feature
-        Object[] noValues = {};
-
-        // create the hash
-        String statementId = Md5Hash.md5Base64(StatementSerializer.writeStatement(statement));
-        SimpleFeature newFeature = SimpleFeatureBuilder.build(featureType, noValues, statementId);
-
-        // write the statement data to the fields
-        Geometry geom = (new WKTReader()).read(GeoParseUtils.getWellKnownText(statement));
-        if (geom == null || geom.isEmpty() || !geom.isValid()) {
-            throw new ParseException("Could not create geometry for statement " + statement);
-        }
-        newFeature.setDefaultGeometry(geom);
-
-        newFeature.setAttribute(SUBJECT_ATTRIBUTE, subject);
-        newFeature.setAttribute(PREDICATE_ATTRIBUTE, predicate);
-        newFeature.setAttribute(OBJECT_ATTRIBUTE, object);
-        newFeature.setAttribute(CONTEXT_ATTRIBUTE, context);
-
-        // preserve the ID that we created for this feature
-        // (set the hint to FALSE to have GeoTools generate IDs)
-        newFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);
-
-        return newFeature;
-    }
-
-    private CloseableIteration<Statement, QueryEvaluationException> performQuery(String type, Geometry geometry,
-            StatementContraints contraints) {
-        List<String> filterParms = new ArrayList<String>();
-
-        filterParms.add(type + "(" + Constants.SF_PROPERTY_GEOMETRY + ", " + geometry + " )");
-
-        if (contraints.hasSubject()) {
-            filterParms.add("( " + SUBJECT_ATTRIBUTE + "= '" + contraints.getSubject() + "') ");
-        }
-        if (contraints.hasContext()) {
-            filterParms.add("( " + CONTEXT_ATTRIBUTE + "= '" + contraints.getContext() + "') ");
-        }
-        if (contraints.hasPredicates()) {
-            List<String> predicates = new ArrayList<String>();
-            for (URI u : contraints.getPredicates()) {
-                predicates.add("( " + PREDICATE_ATTRIBUTE + "= '" + u.stringValue() + "') ");
-            }
-            filterParms.add("(" + StringUtils.join(predicates, " OR ") + ")");
-        }
-
-        String filterString = StringUtils.join(filterParms, " AND ");
-        logger.info("Performing geomesa query : " + filterString);
-
-        return getIteratorWrapper(filterString);
-    }
-
-    private CloseableIteration<Statement, QueryEvaluationException> getIteratorWrapper(final String filterString) {
-
-        return new CloseableIteration<Statement, QueryEvaluationException>() {
-
-            private FeatureIterator<SimpleFeature> featureIterator = null;
-
-            FeatureIterator<SimpleFeature> getIterator() throws QueryEvaluationException {
-                if (featureIterator == null) {
-                    Filter cqlFilter;
-                    try {
-                        cqlFilter = ECQL.toFilter(filterString);
-                    } catch (CQLException e) {
-                        logger.error("Error parsing query: " + filterString, e);
-                        throw new QueryEvaluationException(e);
-                    }
-
-                    Query query = new Query(featureType.getTypeName(), cqlFilter);
-                    try {
-                        featureIterator = featureSource.getFeatures(query).features();
-                    } catch (IOException e) {
-                        logger.error("Error performing query: " + filterString, e);
-                        throw new QueryEvaluationException(e);
-                    }
-                }
-                return featureIterator;
-            }
-
-            @Override
-            public boolean hasNext() throws QueryEvaluationException {
-                return getIterator().hasNext();
-            }
-
-            @Override
-            public Statement next() throws QueryEvaluationException {
-                SimpleFeature feature = (SimpleFeature) getIterator().next();
-                String subjectString = feature.getAttribute(SUBJECT_ATTRIBUTE).toString();
-                String predicateString = feature.getAttribute(PREDICATE_ATTRIBUTE).toString();
-                String objectString = feature.getAttribute(OBJECT_ATTRIBUTE).toString();
-                String contextString = feature.getAttribute(CONTEXT_ATTRIBUTE).toString();
-                Statement statement = StatementSerializer.readStatement(subjectString, predicateString, objectString, contextString);
-                return statement;
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException("Remove not implemented");
-            }
-
-            @Override
-            public void close() throws QueryEvaluationException {
-                getIterator().close();
-            }
-        };
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryEquals(Geometry query, StatementContraints contraints) {
-        return performQuery("EQUALS", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(Geometry query, StatementContraints contraints) {
-        return performQuery("DISJOINT", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryIntersects(Geometry query, StatementContraints contraints) {
-        return performQuery("INTERSECTS", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryTouches(Geometry query, StatementContraints contraints) {
-        return performQuery("TOUCHES", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryCrosses(Geometry query, StatementContraints contraints) {
-        return performQuery("CROSSES", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryWithin(Geometry query, StatementContraints contraints) {
-        return performQuery("WITHIN", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryContains(Geometry query, StatementContraints contraints) {
-        return performQuery("CONTAINS", query, contraints);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(Geometry query, StatementContraints contraints) {
-        return performQuery("OVERLAPS", query, contraints);
-    }
-
-    @Override
-    public Set<URI> getIndexablePredicates() {
-        return validPredicates;
-    }
-
-    @Override
-    public void flush() throws IOException {
-        // TODO cache and flush features instead of writing them one at a time
-    }
-
-    @Override
-    public void close() throws IOException {
-        flush();
-    }
-
-    @Override
-    public String getTableName() {
-        return ConfigUtils.getGeoTablename(conf);
-    }
-}
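Since the file is deleted here, its calling pattern is easy to lose. A minimal usage sketch at the GeoIndexer interface level; "indexer" is assumed to be an already-configured GeoMesaGeoIndexer, and the statement values are invented:

// Editorial sketch, not part of the patch: storing one WKT statement and
// running a within-polygon query through the GeoIndexer interface.
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;

import info.aduna.iteration.CloseableIteration;

import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.domain.RyaType;
import mvm.rya.api.domain.RyaURI;
import mvm.rya.indexing.GeoIndexer;
import mvm.rya.indexing.StatementContraints;
import mvm.rya.indexing.accumulo.geo.GeoConstants;

import org.openrdf.model.Statement;
import org.openrdf.query.QueryEvaluationException;

public class GeoIndexerUsageSketch {
    // "indexer" is assumed to be an already-configured GeoMesaGeoIndexer.
    public static void storeAndQuery(GeoIndexer indexer) throws Exception {
        // Store one statement whose object is a WKT literal.
        RyaStatement stmt = new RyaStatement(
                new RyaURI("http://example.org/place/1"),
                new RyaURI(GeoConstants.GEO_AS_WKT.stringValue()),
                new RyaType(GeoConstants.XMLSCHEMA_OGC_WKT, "POINT(-77.03 38.89)"));
        indexer.storeStatement(stmt);

        // Find statements whose geometry lies within a bounding polygon.
        Geometry box = new WKTReader()
                .read("POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))");
        CloseableIteration<Statement, QueryEvaluationException> results =
                indexer.queryWithin(box, new StatementContraints());
        try {
            while (results.hasNext()) {
                System.out.println(results.next());
            }
        } finally {
            results.close();
        }
    }
}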
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
deleted file mode 100644
index e5c3adff0..000000000
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package mvm.rya.indexing.accumulo.geo;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.log4j.Logger;
-import org.openrdf.model.Literal;
-import org.openrdf.model.Statement;
-
-import com.vividsolutions.jts.io.ParseException;
-
-public class GeoParseUtils {
-    static final Logger logger = Logger.getLogger(GeoParseUtils.class);
-
-    public static String getWellKnownText(Statement statement) throws ParseException {
-        org.openrdf.model.Value v = statement.getObject();
-        if (!(v instanceof Literal)) {
-            throw new ParseException("Statement does not contain Literal: " + statement.toString());
-        }
-
-        Literal lit = (Literal) v;
-        if (!GeoConstants.XMLSCHEMA_OGC_WKT.equals(lit.getDatatype())) {
-            logger.warn("Literal is not of type " + GeoConstants.XMLSCHEMA_OGC_WKT + ": " + statement.toString());
-        }
-
-        return lit.getLabel().toString();
-    }
-}
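A small illustration of getWellKnownText on a hand-built statement; the values are invented, and ValueFactoryImpl is OpenRDF's stock value factory:

// Editorial sketch, not part of the patch: extracting the WKT label from a
// statement whose object is a geo:wktLiteral.
import org.openrdf.model.Statement;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;

import mvm.rya.indexing.accumulo.geo.GeoConstants;
import mvm.rya.indexing.accumulo.geo.GeoParseUtils;

public class GeoParseUtilsSketch {
    public static void main(String[] args) throws Exception {
        ValueFactory vf = ValueFactoryImpl.getInstance();
        Statement s = vf.createStatement(
                vf.createURI("http://example.org/place/1"),
                GeoConstants.GEO_AS_WKT,
                vf.createLiteral("POINT(-77.03 38.89)", GeoConstants.XMLSCHEMA_OGC_WKT));
        // Prints the raw WKT label: POINT(-77.03 38.89)
        System.out.println(GeoParseUtils.getWellKnownText(s));
    }
}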
- */ - - import info.aduna.iteration.CloseableIteration; - - import java.util.Map; - import java.util.Set; - - import mvm.rya.indexing.GeoIndexer; - import mvm.rya.indexing.IndexingExpr; - import mvm.rya.indexing.IteratorFactory; - import mvm.rya.indexing.SearchFunction; - import mvm.rya.indexing.StatementContraints; - import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - - import org.apache.hadoop.conf.Configuration; - import org.openrdf.model.Statement; - import org.openrdf.model.URI; - import org.openrdf.query.BindingSet; - import org.openrdf.query.QueryEvaluationException; - import org.openrdf.query.algebra.QueryModelVisitor; - - import com.google.common.base.Joiner; - import com.google.common.collect.Maps; - import com.vividsolutions.jts.geom.Geometry; - import com.vividsolutions.jts.io.ParseException; - import com.vividsolutions.jts.io.WKTReader; - - //Indexing Node for geo expressions to be inserted into execution plan - //to delegate geo portion of query to geo index - public class GeoTupleSet extends ExternalTupleSet { - - private Configuration conf; - private GeoIndexer geoIndexer; - private IndexingExpr filterInfo; - - - public GeoTupleSet(IndexingExpr filterInfo, GeoIndexer geoIndexer) { - this.filterInfo = filterInfo; - this.geoIndexer = geoIndexer; - this.conf = geoIndexer.getConf(); - } - - @Override - public Set getBindingNames() { - return filterInfo.getBindingNames(); - } - - public GeoTupleSet clone() { - return new GeoTupleSet(filterInfo, geoIndexer); - } - - @Override - public double cardinality() { - return 0.0; // No idea how to estimate the cardinality here. - } - - - @Override - public String getSignature() { - return "(GeoTuple Projection) " + "variables: " + Joiner.on(", ").join(this.getBindingNames()).replaceAll("\\s+", " "); - } - - - - @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } - if (!(other instanceof GeoTupleSet)) { - return false; - } - GeoTupleSet arg = (GeoTupleSet) other; - return this.filterInfo.equals(arg.filterInfo); - } - - @Override - public int hashCode() { - int result = 17; - result = 31*result + filterInfo.hashCode(); - - return result; - } - - - - /** - * Returns an iterator over the result set of the contained IndexingExpr. - *

- * Should be thread-safe; concurrent invocation of this - * method can be expected with some query evaluators. - */ - @Override - public CloseableIteration evaluate(BindingSet bindings) - throws QueryEvaluationException { - - - URI funcURI = filterInfo.getFunction(); - SearchFunction searchFunction = (new GeoSearchFunctionFactory(conf)).getSearchFunction(funcURI); - if(filterInfo.getArguments().length > 1) { - throw new IllegalArgumentException("Index functions do not support more than one argument."); - } - - String queryText = filterInfo.getArguments()[0].stringValue(); - - return IteratorFactory.getIterator(filterInfo.getSpConstraint(), bindings, queryText, searchFunction); - } - - - - //returns appropriate search function for a given URI - //search functions used in GeoMesaGeoIndexer to access index - public class GeoSearchFunctionFactory { - - Configuration conf; - - private final Map SEARCH_FUNCTION_MAP = Maps.newHashMap(); - - public GeoSearchFunctionFactory(Configuration conf) { - this.conf = conf; - } - - - /** - * Get a {@link SearchFunction} for a given URI. - * - * @param searchFunction - * @return - */ - public SearchFunction getSearchFunction(final URI searchFunction) { - - SearchFunction geoFunc = null; - - try { - geoFunc = getSearchFunctionInternal(searchFunction); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } - - return geoFunc; - } - - private SearchFunction getSearchFunctionInternal(final URI searchFunction) throws QueryEvaluationException { - SearchFunction sf = SEARCH_FUNCTION_MAP.get(searchFunction); - - if (sf != null) { - return sf; - } else { - throw new QueryEvaluationException("Unknown Search Function: " + searchFunction.stringValue()); - } - } - - private final SearchFunction GEO_EQUALS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryEquals( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_EQUALS"; - }; - }; - - private final SearchFunction GEO_DISJOINT = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryDisjoint( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_DISJOINT"; - }; - }; - - private final SearchFunction GEO_INTERSECTS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryIntersects( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_INTERSECTS"; - }; - }; - - private final SearchFunction GEO_TOUCHES = new SearchFunction() { - - @Override - public 
CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryTouches( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_TOUCHES"; - }; - }; - - private final SearchFunction GEO_CONTAINS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryContains( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_CONTAINS"; - }; - }; - - private final SearchFunction GEO_OVERLAPS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryOverlaps( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_OVERLAPS"; - }; - }; - - private final SearchFunction GEO_CROSSES = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryCrosses( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_CROSSES"; - }; - }; - - private final SearchFunction GEO_WITHIN = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_WITHIN"; - }; - }; - - { - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_EQUALS, GEO_EQUALS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_DISJOINT, GEO_DISJOINT); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_INTERSECTS, GEO_INTERSECTS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_TOUCHES, GEO_TOUCHES); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_CONTAINS, GEO_CONTAINS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_OVERLAPS, GEO_OVERLAPS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_CROSSES, GEO_CROSSES); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_WITHIN, GEO_WITHIN); - } - - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java deleted file mode 100644 index 
e2f98b30e..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java +++ /dev/null @@ -1,824 +0,0 @@ -package mvm.rya.indexing.accumulo.temporal; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.nio.charset.CharacterCodingException; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map.Entry; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.xml.datatype.XMLGregorianCalendar; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer; -import mvm.rya.accumulo.experimental.AccumuloIndexer; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.indexing.KeyParts; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.TemporalIndexer; -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.StatementSerializer; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.MultiTableBatchWriter; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.ScannerBase; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.commons.codec.binary.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; -import org.joda.time.DateTime; -import org.openrdf.model.Literal; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import cern.colt.Arrays; - -public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements TemporalIndexer { - - private static final Logger logger = Logger.getLogger(AccumuloTemporalIndexer.class); - - private static final String 
CF_INTERVAL = "interval"; - - - - // Delimiter used in the interval stored in the triple's object literal. - // So far, no ontology specifies a date range, just instants. - // Set to the same delimiter used by the indexer, probably needs revisiting. - //private static final String REGEX_intervalDelimiter = TemporalInterval.DELIMITER; - - private Configuration conf; - - private MultiTableBatchWriter mtbw; - - private BatchWriter temporalIndexBatchWriter; - - private Set validPredicates; - private String temporalIndexTableName; - - private boolean isInit = false; - - - - private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, - TableExistsException { - temporalIndexTableName = ConfigUtils.getTemporalTableName(conf); - // Create one index table on first run. - ConfigUtils.createTableIfNotExists(conf, temporalIndexTableName); - - mtbw = ConfigUtils.createMultitableBatchWriter(conf); - - temporalIndexBatchWriter = mtbw.getBatchWriter(temporalIndexTableName); - - validPredicates = ConfigUtils.getTemporalPredicates(conf); - } - - //initialization occurs in setConf because index is created using reflection - @Override - public void setConf(Configuration conf) { - this.conf = conf; - if (!isInit) { - try { - init(); - isInit = true; - } catch (AccumuloException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (AccumuloSecurityException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (TableNotFoundException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (TableExistsException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } - } - } - - @Override - public Configuration getConf() { - return this.conf; - } - - - /** - * Store a statement in the index if it meets the criteria: the object must be - * a literal and the predicate one of the validPredicates from the configuration. - * If it does not meet the criteria, it is silently ignored. - * Logs a warning if the object is not parse-able. - * Attempts to parse with calendarValue = literalValue.calendarValue(); - * if that fails, tries org.joda.time.DateTime.parse(). - * TODO: parse an interval using multiple predicates for same subject -- ontology dependent. - */ - private void storeStatement(Statement statement) throws IOException, IllegalArgumentException { - // if the predicate list is empty, accept all predicates. - // Otherwise, make sure the predicate is on the "valid" list - boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate()); - if (!isValidPredicate || !(statement.getObject() instanceof Literal)) - return; - DateTime[] indexDateTimes = new DateTime[2]; // 0 begin, 1 end of interval - extractDateTime(statement, indexDateTimes); - if (indexDateTimes[0]==null) - return; - - // Add this as an instant, or interval. - try { - if (indexDateTimes[1] != null) { - TemporalInterval interval = new TemporalInterval(new TemporalInstantRfc3339(indexDateTimes[0]), new TemporalInstantRfc3339(indexDateTimes[1])); - addInterval(temporalIndexBatchWriter, interval, statement); - } else { - TemporalInstant instant = new TemporalInstantRfc3339(indexDateTimes[0]); - addInstant(temporalIndexBatchWriter, instant, statement); - } - } catch (MutationsRejectedException e) { - throw new IOException("While adding interval/instant for statement =" + statement, e); - } - }
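
For reference, the fallback parsing order described in the Javadoc above can be condensed into a standalone sketch. This is hypothetical illustration code, not part of the indexer: it assumes only Joda-Time on the classpath, and it omits the literalValue.calendarValue() attempt, which needs an openrdf Literal.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    import org.joda.time.DateTime;

    // Toy stand-in for extractDateTime(): try the interval form first, then a single instant.
    public class DateLiteralParseSketch {
        private static final Pattern INTERVAL = Pattern.compile("\\[(.*)\\,(.*)\\].*");

        /** Returns {begin, end} for "[date1,date2]" literals, or {instant, null}. */
        public static DateTime[] parse(String literal) {
            Matcher m = INTERVAL.matcher(literal);
            if (m.find()) {
                // Interval form: both halves must parse as ISO-8601 date/times.
                return new DateTime[] { new DateTime(m.group(1)), new DateTime(m.group(2)) };
            }
            // Single instant: delegate to Joda-Time's ISO-8601 parser.
            return new DateTime[] { DateTime.parse(literal), null };
        }

        public static void main(String[] args) {
            DateTime[] pair = parse("[2015-12-04T13:42:55Z,2015-12-07T07:02:38Z]");
            System.out.println(pair[0] + " .. " + pair[1]);
        }
    }
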
- - @Override - public void storeStatement(RyaStatement statement) throws IllegalArgumentException, IOException { - storeStatement(RyaToRdfConversions.convertStatement(statement)); - } - - - - /** - * Parse the literal dates from the object of a statement. - * - * @param statement - * @param outputDateTimes - */ - private void extractDateTime(Statement statement, DateTime[] outputDateTimes) { - if (!(statement.getObject() instanceof Literal)) // Error since it should already be tested by caller. - throw new RuntimeException("Statement's object must be a literal: " + statement); - // throws IllegalArgumentException NumberFormatException if can't parse - String logThis = null; Literal literalValue = (Literal) statement.getObject(); - // First attempt to parse an interval in the form "[date1,date2]" - Matcher matcher = Pattern.compile("\\[(.*)\\,(.*)\\].*").matcher(literalValue.stringValue()); - if (matcher.find()) { - try { - // Got a datetime pair, parse into an interval. - outputDateTimes[0] = new DateTime(matcher.group(1)); - outputDateTimes[1] = new DateTime(matcher.group(2)); - return; - } catch (java.lang.IllegalArgumentException e) { - logThis = e.getMessage() + " " + logThis; - outputDateTimes[0]=null; - outputDateTimes[1]=null; - } - } - - try { - XMLGregorianCalendar calendarValue = literalValue.calendarValue(); - outputDateTimes[0] = new DateTime(calendarValue.toGregorianCalendar()); - outputDateTimes[1] = null; - return; - } catch (java.lang.IllegalArgumentException e) { - logThis = e.getMessage(); - } - // Try again using Joda Time DateTime.parse() - try { - outputDateTimes[0] = DateTime.parse(literalValue.stringValue()); - outputDateTimes[1] = null; - //System.out.println(">>>>>>>Joda parsed: "+literalValue.stringValue()); - return; - } catch (java.lang.IllegalArgumentException e) { - logThis = e.getMessage() + " " + logThis; - } - logger.warn("TemporalIndexer is unable to parse the date/time from statement=" + statement.toString() + " " +logThis); - return; - } - - /** - * Index a new interval - * TODO: integrate into KeyParts (or eliminate) - * @param writer - * @param interval - * @param statement - * @throws MutationsRejectedException - */ - public void addInterval(BatchWriter writer, TemporalInterval interval, Statement statement) throws MutationsRejectedException { - - Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - Text cf = new Text(StatementSerializer.writeContext(statement)); - Text cqBegin = new Text(KeyParts.CQ_BEGIN); - Text cqEnd = new Text(KeyParts.CQ_END); - - // Start Begin index - Text keyText = new Text(interval.getAsKeyBeginning()); - KeyParts.appendUniqueness(statement, keyText); - Mutation m = new Mutation(keyText); - m.put(cf, cqBegin, statementValue); - // System.out.println("mutations add begin row=" + m.getRow() + " value=" + value.toString()); - writer.addMutation(m); - - // now the end index: - keyText = new Text(interval.getAsKeyEnd()); - 
KeyParts.appendUniqueness(statement, keyText); - m = new Mutation(keyText); - m.put(cf, cqEnd, new Value(statementValue)); - // System.out.println("mutations add end row=" + m.getRow() + " value=" + value.toString()); - writer.addMutation(m); - } - - - /** - * Index a new instant. - * Make indexes that handle this expression: - * hash( s? p? ) ?o - * == o union hash(s)o union hash(p)o union hash(sp)o - * - * @param writer - * @param instant - * @param statement - * @throws MutationsRejectedException - */ - public void addInstant(BatchWriter writer, TemporalInstant instant, Statement statement) throws MutationsRejectedException { - KeyParts keyParts = new KeyParts(statement, instant); - for (KeyParts k: keyParts) { - Mutation m = new Mutation(k.getStoreKey()); - m.put(k.cf, k.cq,k.getValue()); - writer.addMutation(m); - } - } - - - /** - * Creates a scanner and handles all the throwables and nulls. - * - * @return a scanner for the temporal index table - * @throws QueryEvaluationException - */ - private Scanner getScanner() throws QueryEvaluationException { - String whileDoing = "While creating a scanner for a temporal query. table name=" + temporalIndexTableName; - Scanner scanner = null; - try { - scanner = ConfigUtils.createScanner(temporalIndexTableName, conf); - } catch (AccumuloException e) { - logger.error(whileDoing, e); - throw new QueryEvaluationException(whileDoing, e); - } catch (AccumuloSecurityException e) { - throw new QueryEvaluationException(whileDoing, e); - } catch (TableNotFoundException e) { - logger.error(whileDoing, e); - throw new QueryEvaluationException(whileDoing - + " The temporal index table should have been created by this constructor, if found missing.", e); - } - return scanner; - } - - private BatchScanner getBatchScanner() throws QueryEvaluationException { - String whileDoing = "While creating a Batch scanner for a temporal query. table name=" + temporalIndexTableName; - try { - return ConfigUtils.createBatchScanner(temporalIndexTableName, conf); - } catch (AccumuloException e) { - logger.error(whileDoing, e); - throw new QueryEvaluationException(whileDoing, e); - } catch (AccumuloSecurityException e) { - throw new QueryEvaluationException(whileDoing, e); - } catch (TableNotFoundException e) { - logger.error(whileDoing, e); - throw new QueryEvaluationException(whileDoing - + " The temporal index table should have been created by this constructor, if found missing. ", e); - } - } - - - /** - * Statements where the datetime is exactly the same as the queryInstant. - */ - @Override - public CloseableIteration queryInstantEqualsInstant( - TemporalInstant queryInstant, StatementContraints constraints) - throws QueryEvaluationException { - // get rows where the repository time is equal to the given time in queryInstant. - Query query = new Query() { - @Override - public Range getRange(KeyParts keyParts) { - //System.out.println("Scanning queryInstantEqualsInstant: prefix:" + KeyParts.toHumanString(keyParts.getQueryKey())); - return Range.prefix(keyParts.getQueryKey()); // <-- specific logic - } - }; - ScannerBase scanner = query.doQuery(queryInstant, constraints); - // TODO currently context constraints are filtered on the client. - return getContextIteratorWrapper(scanner, constraints.getContext()); - }
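
The hash( s? p? ) ?o expression above amounts to writing one index row per constraint combination, so each of the four query shapes can be answered with a single range scan. A toy illustration follows; the class name and the stand-in hash are hypothetical, and the real key layout and hashing live in KeyParts:

    // Hypothetical sketch of the four rows addInstant() writes for one statement.
    public class InstantKeySketch {
        // Stand-in only; the real implementation uses KeyParts' hashing.
        static String hash(String s) {
            return Integer.toHexString(s.hashCode());
        }

        public static void main(String[] args) {
            String subj = "urn:subj", pred = "urn:pred", time = "2015-12-04T18:42:55Z";
            System.out.println(time);                     // ?o   : unconstrained instant scan
            System.out.println(hash(subj) + time);        // s?o  : subject-constrained
            System.out.println(hash(pred) + time);        // ?po  : predicate-constrained
            System.out.println(hash(subj + pred) + time); // sp?o : subject+predicate
        }
    }
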
- /** - * Get statements where the db row ID is BEFORE the given queryInstant. - */ - @Override - public CloseableIteration queryInstantBeforeInstant( - TemporalInstant queryInstant, StatementContraints constraints) - throws QueryEvaluationException { - // get rows where the repository time is before the given time. - Query query = new Query() { - @Override - public Range getRange(KeyParts keyParts) { - Text start = null; - if (keyParts.constraintPrefix != null ) // Yes, has constraints - start = keyParts.constraintPrefix; // <-- start specific logic - else - start = new Text(KeyParts.HASH_PREFIX_FOLLOWING); - Text endAt = keyParts.getQueryKey(); // <-- end specific logic - //System.out.println("Scanning queryInstantBeforeInstant: from:" + KeyParts.toHumanString(start) + " up to:" + KeyParts.toHumanString(endAt)); - return new Range(start, true, endAt, false); - } - }; - ScannerBase scanner = query.doQuery(queryInstant, constraints); - return getContextIteratorWrapper(scanner, constraints.getContext()); - } - - /** - * Get statements where the date object is after the given queryInstant. - */ - @Override - public CloseableIteration queryInstantAfterInstant( - TemporalInstant queryInstant, StatementContraints constraints) - throws QueryEvaluationException { - Query query = new Query() { - @Override - public Range getRange(KeyParts keyParts) { - Text start = Range.followingPrefix(keyParts.getQueryKey()); // <-- specific logic - Text endAt = null; // no constraints // <-- specific logic - if (keyParts.constraintPrefix != null ) // Yes, has constraints - endAt = Range.followingPrefix(keyParts.constraintPrefix); - //System.out.println("Scanning queryInstantAfterInstant from after:" + KeyParts.toHumanString(start) + " up to:" + KeyParts.toHumanString(endAt)); - return new Range(start, true, endAt, false); - } - }; - ScannerBase scanner = query.doQuery(queryInstant, constraints); - return getContextIteratorWrapper(scanner, constraints.getContext()); - }
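
Range.followingPrefix() is what makes the "after" scan above strictly exclusive of the query key: it returns the first possible row beyond the given prefix. A minimal sketch, assuming accumulo-core and Hadoop Text on the classpath (class and method names are illustrative):

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.data.Range;
    import org.apache.hadoop.io.Text;

    // Hypothetical illustration of an exclusive "after this key" range.
    public class AfterRangeSketch {
        public static Range after(byte[] queryKey) {
            Text start = Range.followingPrefix(new Text(queryKey)); // first row after the key
            return new Range(start, true, null, true);              // open-ended upper bound
        }

        public static void main(String[] args) {
            System.out.println(after("2015-12-04T18:42:55Z".getBytes(StandardCharsets.UTF_8)));
        }
    }
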
- /** - * Get instants before a given interval. Returns queryInstantBeforeInstant with the interval's beginning time. - */ - @Override - public CloseableIteration queryInstantBeforeInterval( - TemporalInterval givenInterval, StatementContraints contraints) - throws QueryEvaluationException { - return queryInstantBeforeInstant(givenInterval.getHasBeginning(), contraints); - } - - /** - * Get instants after a given interval. Returns queryInstantAfterInstant with the interval's end time. - */ - @Override - public CloseableIteration queryInstantAfterInterval( - TemporalInterval givenInterval, StatementContraints contraints) throws QueryEvaluationException { - return queryInstantAfterInstant(givenInterval.getHasEnd(), contraints); - } - - /** - * Get instants inside a given interval. - * Returns after interval's beginning time, and before ending time, - * exclusive (don't match the beginning and ending). - */ - @Override - public CloseableIteration queryInstantInsideInterval( - TemporalInterval queryInterval, StatementContraints constraints) - throws QueryEvaluationException { - // get rows where the time is after the given interval's beginning time and before the ending time. - final TemporalInterval theQueryInterval = queryInterval; - Query query = new Query() { - private final TemporalInterval queryInterval = theQueryInterval; - @Override - public Range getRange(KeyParts keyParts) { - Text start = Range.followingPrefix(new Text(keyParts.getQueryKey(queryInterval.getHasBeginning()))); - Text endAt = new Text(keyParts.getQueryKey(queryInterval.getHasEnd())); // <-- end specific logic - //System.out.println("Scanning queryInstantInsideInterval: from excluding:" + KeyParts.toHumanString(start) + " up to:" + KeyParts.toHumanString(endAt)); - return new Range(start, false, endAt, false); - } - }; - ScannerBase scanner = query.doQuery(queryInterval.getHasBeginning(), constraints); - return getContextIteratorWrapper(scanner, constraints.getContext()); - } - /** - * Get instants matching the beginning of a given interval. - */ - @Override - public CloseableIteration queryInstantHasBeginningInterval( - TemporalInterval queryInterval, StatementContraints contraints) - throws QueryEvaluationException { - return queryInstantEqualsInstant(queryInterval.getHasBeginning(), contraints); - } - - /** - * Get instants matching the ending of a given interval. - */ - @Override - public CloseableIteration queryInstantHasEndInterval( - TemporalInterval queryInterval, StatementContraints contraints) - throws QueryEvaluationException { - return queryInstantEqualsInstant(queryInterval.getHasEnd(), contraints); - } - - /** - * Get intervals stored in the repository matching the given interval. - * Indexing Intervals will probably change or be removed. - * Currently predicate and subject constraints are filtered on the client. - */ - @Override - public CloseableIteration queryIntervalEquals( - TemporalInterval query, StatementContraints contraints) - throws QueryEvaluationException { - Scanner scanner = getScanner(); - if (scanner != null) { - // get rows where the start and end match. - Range range = Range.prefix(new Text(query.getAsKeyBeginning())); - scanner.setRange(range); - if (contraints.hasContext()) - scanner.fetchColumn(new Text(contraints.getContext().toString()), new Text(KeyParts.CQ_BEGIN)); - else - scanner.fetchColumn(new Text(""), new Text(KeyParts.CQ_BEGIN)); - } - // Iterator> iter = scanner.iterator(); - // while (iter.hasNext()) { - // System.out.println("queryIntervalEquals results:"+iter.next()); - // } - //return getConstrainedIteratorWrapper(scanner, contraints); - return getIteratorWrapper(scanner); - } - - /** - * Find intervals stored in the repository before the given interval. Find interval endings that are - * before the given beginning. - * Indexing Intervals will probably change or be removed. - * Currently predicate and subject constraints are filtered on the client. - */ - @Override - public CloseableIteration queryIntervalBefore( - TemporalInterval queryInterval, StatementContraints constraints) throws QueryEvaluationException - { - Scanner scanner = getScanner(); - if (scanner != null) { - // get rows where the end date is less than the queryInterval.getBefore() - Range range = new Range(null, false, new Key(new Text(queryInterval.getHasBeginning().getAsKeyBytes())), false); - scanner.setRange(range); - if (constraints.hasContext()) - scanner.fetchColumn(new Text(constraints.getContext().toString()), new Text(KeyParts.CQ_END)); - else - scanner.fetchColumn(new Text(""), new Text(KeyParts.CQ_END)); - } - return getIteratorWrapper(scanner); - } - - /** - * Intervals after a given interval. Find intervals that begin after the endings of the given interval.
- * Use the special following prefix mechanism to avoid matching the beginning date. - * Indexing Intervals will probably change or be removed. - * Currently predicate and subject and context constraints are filtered on the client. - */ - @Override - public CloseableIteration queryIntervalAfter( - TemporalInterval queryInterval, StatementContraints constraints) - throws QueryEvaluationException { - - Scanner scanner = getScanner(); - if (scanner != null) { - // get rows where the start date is greater than the queryInterval.getEnd() - Range range = new Range(new Key(Range.followingPrefix(new Text(queryInterval.getHasEnd().getAsKeyBytes()))), false, null, true); - scanner.setRange(range); - - if (constraints.hasContext()) - scanner.fetchColumn(new Text(constraints.getContext().toString()), new Text(KeyParts.CQ_BEGIN)); - else - scanner.fetchColumn(new Text(""), new Text(KeyParts.CQ_BEGIN)); - } - // TODO currently predicate, subject and context constraints are filtered on the clients - return getIteratorWrapper(scanner); - } - // -- - // -- END of Query functions. Next up, general stuff used by the queries above. - // -- - - /** - * Allows passing range-specific logic into doQuery. - * Each query function implements an anonymous instance of this and calls its doQuery(). - */ - abstract class Query { - abstract protected Range getRange(KeyParts keyParts); - - public ScannerBase doQuery(TemporalInstant queryInstant, StatementContraints constraints) throws QueryEvaluationException { - // key is contraintPrefix + time, or just time. - // Any constraints handled here, if the constraints are empty, the - // thisKeyParts.contraintPrefix will be null. - List keyParts = KeyParts.keyPartsForQuery(queryInstant, constraints); - ScannerBase scanner = null; - if (keyParts.size() > 1) - scanner = getBatchScanner(); - else - scanner = getScanner(); - - Collection ranges = new HashSet(); - KeyParts lastKeyParts = null; - Range range = null; - for (KeyParts thisKeyParts : keyParts) { - range = this.getRange(thisKeyParts); - ranges.add(range); - lastKeyParts = thisKeyParts; - } - //System.out.println("Scanning columns, cf:" + lastKeyParts.cf + "CQ:" + lastKeyParts.cq); - scanner.fetchColumn(new Text(lastKeyParts.cf), new Text(lastKeyParts.cq)); - if (scanner instanceof BatchScanner) - ((BatchScanner) scanner).setRanges(ranges); - else if (range != null) - ((Scanner) scanner).setRange(range); - return scanner; - } - }
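
The Query class above is a small template method: doQuery() owns all of the scanner plumbing, and each query contributes only its Range logic through an anonymous subclass. A condensed, hypothetical sketch of the same shape:

    import org.apache.accumulo.core.data.Range;
    import org.apache.hadoop.io.Text;

    // Hypothetical condensed version of the template-method pattern above.
    public class QueryTemplateSketch {
        abstract static class Query {
            abstract Range getRange(Text queryKey); // the only per-query logic
        }

        public static void main(String[] args) {
            // "equals": scan only rows sharing the exact key prefix.
            Query equalsQuery = new Query() {
                @Override
                Range getRange(Text queryKey) {
                    return Range.prefix(queryKey);
                }
            };
            // "before": scan from the table start up to, but excluding, the key.
            Query beforeQuery = new Query() {
                @Override
                Range getRange(Text queryKey) {
                    return new Range(null, true, queryKey, false);
                }
            };
            Text key = new Text("2015-12-04T18:42:55Z");
            System.out.println(equalsQuery.getRange(key));
            System.out.println(beforeQuery.getRange(key));
        }
    }
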
- /** - * An iteration wrapper for a loaded scanner that is returned for each query above. - * - * @param scanner - * the results to iterate, then close. - * @return an anonymous object that will iterate the resulting statements from a given scanner. - */ - private static CloseableIteration getIteratorWrapper(final ScannerBase scanner) { - - final Iterator> i = scanner.iterator(); - - return new CloseableIteration() { - @Override - public boolean hasNext() { - return i.hasNext(); - } - - @Override - public Statement next() throws QueryEvaluationException { - Entry entry = i.next(); - Value v = entry.getValue(); - try { - String dataString = Text.decode(v.get(), 0, v.getSize()); - Statement s = StatementSerializer.readStatement(dataString); - return s; - } catch (CharacterCodingException e) { - logger.error("Error decoding value=" + Arrays.toString(v.get()), e); - throw new QueryEvaluationException(e); - } catch (IOException e) { - logger.error("Error de-serializing statement, string=" + v.get(), e); - throw new QueryEvaluationException(e); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Remove not implemented"); - } - - @Override - public void close() throws QueryEvaluationException { - scanner.close(); - } - }; - } - - - /** - * An iteration wrapper for a loaded scanner that is returned for partially supported interval queries above. - * - * @param scanner the results to iterate, then close. - * @param constraints limit statements returned by next() to those matching the constraints. - * @return an anonymous object that will iterate the resulting statements from a given scanner. - * @throws QueryEvaluationException - */ - private static CloseableIteration getConstrainedIteratorWrapper(final Scanner scanner, final StatementContraints constraints) { - if (!constraints.hasContext() && !constraints.hasSubject() && !constraints.hasPredicates()) - return getIteratorWrapper(scanner); - return new ConstrainedIteratorWrapper(scanner) { - @Override - public boolean allowedBy(Statement statement) { - return allowedByConstraints(statement, constraints); - } - }; - } - /** - * An iteration wrapper for a loaded scanner that is returned for queries above. - * Currently, this temporal index supports contexts only on the client, using this filter. - * - * @param scanner the results to iterate, then close. - * @param constraints limit statements returned by next() to those matching the constraints. - * @return an anonymous object that will iterate the resulting statements from a given scanner. - * @throws QueryEvaluationException - */ - private static CloseableIteration getContextIteratorWrapper(final ScannerBase scanner, final Resource context) { - if (context==null) - return getIteratorWrapper(scanner); - return new ConstrainedIteratorWrapper(scanner) { - @Override - public boolean allowedBy(Statement statement) { - return allowedByContext(statement, context); - } - }; - } - /** - * Wrap a scanner in an iterator that will filter statements based on a boolean allowedBy(). - * If the allowedBy function returns false for the next statement, it is skipped. - * This is used to do on the client side what the index cannot (yet) do on the server side.
- */ - abstract static class ConstrainedIteratorWrapper implements CloseableIteration { - private Statement nextStatement=null; - private boolean isInitialized = false; - final private Iterator> i; - final private ScannerBase scanner; - - ConstrainedIteratorWrapper(ScannerBase scanner) { - this.scanner = scanner; - i=scanner.iterator(); - } - @Override - public boolean hasNext() throws QueryEvaluationException { - if (!isInitialized) - internalGetNext(); - return (nextStatement != null) ; - } - - @Override - public Statement next() throws QueryEvaluationException { - if (nextStatement==null) { - if (!isInitialized) - internalGetNext(); - if (nextStatement==null) - throw new NoSuchElementException(); - } - // use this one, then get the next one loaded. - Statement thisStatement = this.nextStatement; - internalGetNext(); - return thisStatement; - } - - /** - * Gets the next statement meeting constraints and stores in nextStatement. - * Sets null when all done, or on exception. - * @throws QueryEvaluationException - */ - private void internalGetNext() - throws QueryEvaluationException { - isInitialized=true; - this.nextStatement = null; // Default on done or error. - Statement statement = null; - while (i.hasNext()) { - Entry entry = i.next(); - Value v = entry.getValue(); - try { - String dataString = Text.decode(v.get(), 0, v.getSize()); - statement = StatementSerializer.readStatement(dataString); - } catch (CharacterCodingException e) { - logger.error("Error decoding value=" + Arrays.toString(v.get()), e); - throw new QueryEvaluationException(e); - } catch (IOException e) { - logger.error("Error de-serializing statement, string=" + v.get(), e); - throw new QueryEvaluationException(e); - } - if (allowedBy(statement)) { - this.nextStatement = statement; - return; - } - } - } - public abstract boolean allowedBy(Statement s); - - @Override - public void remove() { - throw new UnsupportedOperationException("Remove not implemented"); - } - - @Override - public void close() throws QueryEvaluationException { - scanner.close(); - } - } - - /** - * Does the statement meet the constraints? Match predicate, subject, and context. - * @param statement Candidate statement to be allowed or not. - * @param contraints fields that are non-null must match the statement's components, otherwise it is not allowed. - * @return true if the parts of the statement match the statementConstraints' parts. - */ - protected static boolean allowedByConstraints(Statement statement, StatementContraints constraints) { - - if (constraints.hasSubject() && ! constraints.getSubject().toString().equals(statement.getSubject().toString())) - {System.out.println("Constrain subject: "+constraints.getSubject()+" != " + statement.getSubject()); return false;} - //return false; - - if (! allowedByContext(statement, constraints.getContext())) - return false; - //{System.out.println("Constrain context: "+constraints.getContext()+" != " + statement.getContext()); return false;} - - if (constraints.hasPredicates() && ! constraints.getPredicates().contains(statement.getPredicate())) - return false; - //{System.out.println("Constrain predicate: "+constraints.getPredicates()+" != " + statement.getPredicate()); return false;} - - System.out.println("allow statement: "+ statement.toString()); - return true; - } - - /** - * Allow only if the context matches the statement. This is a client side filter. 
- * @param statement - * @param context - * @return - */ - protected static boolean allowedByContext(Statement statement, Resource context) { - return context==null || context.equals( statement.getContext() ); - } - - @Override - public Set getIndexablePredicates() { - - return validPredicates; - } - - /** - * Flush the data to the batchwriter. - * Throws a IOException as required by the flushable interface, - * wrapping MutationsRejectedException. - */ - @Override - public void flush() throws IOException { - try { - mtbw.flush(); - } catch (MutationsRejectedException e) { - String msg = "Error while flushing the batch writer."; - logger.error(msg, e); - throw new IOException(msg, e); - } - } - - /** - * Close batchwriter. - * Throws a IOException as required by the flushable interface, - * wrapping MutationsRejectedException. - */ - @Override - public void close() throws IOException { - try { - - mtbw.close(); - - } catch (MutationsRejectedException e) { - String msg = "Error while closing the batch writer."; - logger.error(msg, e); - throw new IOException(msg, e); - } - } - - - - @Override - public String getTableName() { - return ConfigUtils.getTemporalTableName(conf); - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java deleted file mode 100644 index a69a79fa5..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java +++ /dev/null @@ -1,218 +0,0 @@ -/** - * - */ -package mvm.rya.indexing.accumulo.temporal; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; - -import org.apache.commons.codec.binary.StringUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; - -/** - * Immutable date and time instance returning a human readable key. - * Preserves the Time zone, but not stored in the key. - * Converts fields (hours, etc) correctly for tz=Zulu when stored, - * so the original timezone is not preserved when retrieved. - * - * Uses rfc 3339, which looks like: YYYY-MM-DDThh:mm:ssZ a subset - * of ISO-8601 : https://www.ietf.org/rfc/rfc3339.txt - * - * Limits: All dates and times are assumed to be in the "current era", no BC, - * somewhere between 0000AD and 9999AD. - * - * Resolution: to the second, or millisecond if the optional fraction is used. - * - * This is really a wrapper for Joda DateTime. 
if you need functionality from - * that wonderful class, simply use t.getAsDateTime(). - * - */ -public class TemporalInstantRfc3339 implements TemporalInstant { - - private static final long serialVersionUID = -7790000399142290309L; - - private final DateTime dateTime; - /** - * Format key like this: YYYY-MM-DDThh:mm:ssZ - */ - public final static DateTimeFormatter FORMATTER = ISODateTimeFormat.dateTimeNoMillis(); - - /** - * New date assumed UTC time zone. - * - * @param year - * @param month - * @param day - * @param hour - * @param minute - * @param second - */ - public TemporalInstantRfc3339(int year, int month, int day, int hour, int minute, int second) { - dateTime = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC); - } - - /** - * Construct with a Joda DateTime; - * TZ is preserved, but not in the key. - * - * @param datetime - * initialize with this date time. Converted to zulu time zone for key generation. - */ - public TemporalInstantRfc3339(DateTime datetime) { - this.dateTime = datetime; - } - /** - * Get an interval setting beginning and end with this implementation of {@link TemporalInstant}. - * beginning must be less than end. - * - * @param dateTimeInterval String in the form [dateTime1,dateTime2] - */ - public static TemporalInterval parseInterval(String dateTimeInterval) { - - Matcher matcher = Pattern.compile("\\[(.*)\\,(.*)\\].*").matcher(dateTimeInterval); - if (matcher.find()) { - // Got a date time pair, parse into an interval. - return new TemporalInterval( - new TemporalInstantRfc3339(new DateTime(matcher.group(1))), - new TemporalInstantRfc3339(new DateTime(matcher.group(2)))); - } - throw new IllegalArgumentException("Can't parse interval, expecting '[ISO8601dateTime1,ISO8601dateTime2]', actual: "+dateTimeInterval); - } - - /** - * If this is older, returns negative; if equal, 0; otherwise positive. - * - */ - @Override - public int compareTo(TemporalInstant that) { - return this.getAsKeyString().compareTo(that.getAsKeyString()); - } - - @Override - public byte[] getAsKeyBytes() { - return StringUtils.getBytesUtf8(getAsKeyString()); - } - - @Override - public String getAsKeyString() { - return dateTime.withZone(DateTimeZone.UTC).toString(FORMATTER); - } - - /** - * Readable string, formatted as local time at {@link DateTimeZone}. - * If the timezone is UTC (Z), it was probably a key from the database. - * If the server and client are in different time zones, should probably use the client timezone. - * - * Time at specified time zone: - * instant.getAsReadable(DateTimeZone.forID("-05:00"))); - * instant.getAsReadable(DateTimeZone.getDefault())); - * - * Use original time zone set in the constructor: - * instant.getAsDateTime().toString(TemporalInstantRfc3339.FORMATTER)); - * - */ - @Override - public String getAsReadable(DateTimeZone dateTimeZone) { - return dateTime.withZone(dateTimeZone).toString(FORMATTER); - } - - /** - * Use original time zone set in the constructor, or UTC if from parsing the key. - */ - @Override - public String getAsReadable() { - return dateTime.toString(FORMATTER); - } - - /** - * default toString, same as getAsReadable(). - */ - @Override - public String toString() { - return getAsReadable(); - } - - /** - * Returns the wrapped Joda DateTime, keeping the time zone set in the constructor. - */ - @Override - public DateTime getAsDateTime() { - return dateTime; - }
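
Because getAsKeyString() normalizes every instant to UTC before formatting, lexicographic order of the keys matches chronological order of the instants, which is what lets the index answer before/after queries with plain row-range scans. A small demonstration, assuming Joda-Time (the class name is illustrative):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;
    import org.joda.time.format.DateTimeFormatter;
    import org.joda.time.format.ISODateTimeFormat;

    // Hypothetical demonstration: UTC-normalized RFC 3339 strings sort chronologically.
    public class Rfc3339KeySketch {
        static final DateTimeFormatter FMT = ISODateTimeFormat.dateTimeNoMillis();

        static String key(DateTime t) {
            return t.withZone(DateTimeZone.UTC).toString(FMT);
        }

        public static void main(String[] args) {
            String a = key(new DateTime("2015-12-04T13:42:55-05:00")); // 2015-12-04T18:42:55Z
            String b = key(new DateTime("2015-12-04T19:00:00Z"));      // 2015-12-04T19:00:00Z
            System.out.println(a.compareTo(b) < 0); // true: the earlier instant sorts first
        }
    }
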
- /** - * Minimum Date, used for infinitely past. - */ - private static final TemporalInstant MINIMUM = new TemporalInstantRfc3339(new DateTime(Long.MIN_VALUE)); - /** - * Maximum date/time, used for infinitely in the future. - */ - private static final TemporalInstant MAXIMUM = new TemporalInstantRfc3339(new DateTime(Long.MAX_VALUE)); - - /** - * Infinite past date. - * @return an instant that will compare as OLDER than anything but itself. - */ - public static TemporalInstant getMinimumInstance() { - return MINIMUM; - } - /** - * Infinite future date. - * @return an instant that will compare as NEWER than anything but itself - */ - - public static TemporalInstant getMaximumInstance() { - return MAXIMUM; - } - - /* (non-Javadoc) - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return this.getAsKeyString().hashCode(); - } - - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - TemporalInstantRfc3339 other = (TemporalInstantRfc3339) obj; - return (this.getAsKeyString().equals(other.getAsKeyString())); - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java deleted file mode 100644 index f2ed8c44e..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java +++ /dev/null @@ -1,320 +0,0 @@ -package mvm.rya.indexing.accumulo.temporal; - - /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Map; -import java.util.Set; - -import mvm.rya.indexing.IndexingExpr; -import mvm.rya.indexing.IteratorFactory; -import mvm.rya.indexing.SearchFunction; -import mvm.rya.indexing.SearchFunctionFactory; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.TemporalIndexer; -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; -import mvm.rya.indexing.accumulo.geo.GeoTupleSet; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.hadoop.conf.Configuration; -import org.joda.time.DateTime; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.QueryModelVisitor; - -import com.google.common.base.Joiner; -import com.google.common.collect.Maps; - -//Indexing Node for temporal expressions to be inserted into execution plan -//to delegate temporal portion of query to temporal index -public class TemporalTupleSet extends ExternalTupleSet { - - private Configuration conf; - private TemporalIndexer temporalIndexer; - private IndexingExpr filterInfo; - - - public TemporalTupleSet(IndexingExpr filterInfo, TemporalIndexer temporalIndexer) { - this.filterInfo = filterInfo; - this.temporalIndexer = temporalIndexer; - this.conf = temporalIndexer.getConf(); - } - - /** - * {@inheritDoc} - */ - @Override - public Set getBindingNames() { - return filterInfo.getBindingNames(); - } - - /** - * {@inheritDoc} - *

- * Note that we need a deep copy for everything that (during optimizations) - * can be altered via {@link #visitChildren(QueryModelVisitor)} - */ - public TemporalTupleSet clone() { - return new TemporalTupleSet(filterInfo, temporalIndexer); - } - - @Override - public double cardinality() { - return 0.0; // No idea how to estimate the cardinality here. - } - - - @Override - public String getSignature() { - - return "(TemporalTuple Projection) " + "variables: " + Joiner.on(", ").join(this.getBindingNames()).replaceAll("\\s+", " "); - } - - @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } - if (!(other instanceof TemporalTupleSet)) { - return false; - } - TemporalTupleSet arg = (TemporalTupleSet) other; - return this.filterInfo.equals(arg.filterInfo); - } - - - @Override - public int hashCode() { - int result = 17; - result = 31*result + filterInfo.hashCode(); - - return result; - } - - - /** - * Returns an iterator over the result set associated with the contained IndexingExpr. - *

- * Should be thread-safe; concurrent invocation of this - * method can be expected with some query evaluators. - */ - @Override - public CloseableIteration evaluate(BindingSet bindings) - throws QueryEvaluationException { - - - URI funcURI = filterInfo.getFunction(); - SearchFunction searchFunction = (new TemporalSearchFunctionFactory(conf)).getSearchFunction(funcURI); - - if(filterInfo.getArguments().length > 1) { - throw new IllegalArgumentException("Index functions do not support more than one argument."); - } - - String queryText = filterInfo.getArguments()[0].stringValue(); - - return IteratorFactory.getIterator(filterInfo.getSpConstraint(), bindings, queryText, searchFunction); - } - - - //returns appropriate search function for a given URI - //search functions used by TemporalIndexer to query Temporal Index - private class TemporalSearchFunctionFactory { - - private final Map SEARCH_FUNCTION_MAP = Maps.newHashMap(); - Configuration conf; - - public TemporalSearchFunctionFactory(Configuration conf) { - this.conf = conf; - } - - - /** - * Get a {@link SearchFunction} for a given URI. - * - * @param searchFunction - * @return - */ - public SearchFunction getSearchFunction(final URI searchFunction) { - - SearchFunction geoFunc = null; - - try { - geoFunc = getSearchFunctionInternal(searchFunction); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } - - return geoFunc; - } - - private SearchFunction getSearchFunctionInternal(final URI searchFunction) throws QueryEvaluationException { - SearchFunction sf = SEARCH_FUNCTION_MAP.get(searchFunction); - - if (sf != null) { - return sf; - } else { - throw new QueryEvaluationException("Unknown Search Function: " + searchFunction.stringValue()); - } - - - } - - - - private final SearchFunction TEMPORAL_InstantAfterInstant = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInstant queryInstant = new TemporalInstantRfc3339(DateTime.parse(searchTerms)); - return temporalIndexer.queryInstantAfterInstant(queryInstant, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantAfterInstant"; - }; - }; - private final SearchFunction TEMPORAL_InstantBeforeInstant = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInstant queryInstant = new TemporalInstantRfc3339(DateTime.parse(searchTerms)); - return temporalIndexer.queryInstantBeforeInstant(queryInstant, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantBeforeInstant"; - }; - }; - - private final SearchFunction TEMPORAL_InstantEqualsInstant = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInstant queryInstant = new TemporalInstantRfc3339(DateTime.parse(searchTerms)); - return temporalIndexer.queryInstantEqualsInstant(queryInstant, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantEqualsInstant"; - }; - }; - - private final SearchFunction TEMPORAL_InstantAfterInterval = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInterval queryInterval = 
TemporalInstantRfc3339.parseInterval(searchTerms); - return temporalIndexer.queryInstantAfterInterval(queryInterval, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantAfterInterval"; - }; - }; - - private final SearchFunction TEMPORAL_InstantBeforeInterval = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInterval queryInterval = TemporalInstantRfc3339.parseInterval(searchTerms); - return temporalIndexer.queryInstantBeforeInterval(queryInterval, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantBeforeInterval"; - }; - }; - - private final SearchFunction TEMPORAL_InstantInsideInterval = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInterval queryInterval = TemporalInstantRfc3339.parseInterval(searchTerms); - return temporalIndexer.queryInstantInsideInterval(queryInterval, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantInsideInterval"; - }; - }; - - private final SearchFunction TEMPORAL_InstantHasBeginningInterval = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInterval queryInterval = TemporalInstantRfc3339.parseInterval(searchTerms); - return temporalIndexer.queryInstantHasBeginningInterval(queryInterval, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantHasBeginningInterval"; - }; - }; - - private final SearchFunction TEMPORAL_InstantHasEndInterval = new SearchFunction() { - @Override - public CloseableIteration performSearch(String searchTerms, - StatementContraints contraints) throws QueryEvaluationException { - TemporalInterval queryInterval = TemporalInstantRfc3339.parseInterval(searchTerms); - return temporalIndexer.queryInstantHasEndInterval(queryInterval, contraints); - } - - @Override - public String toString() { - return "TEMPORAL_InstantHasEndInterval"; - }; - }; - - { - - String TEMPORAL_NS = "tag:rya-rdf.org,2015:temporal#"; - - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"after"), TEMPORAL_InstantAfterInstant); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"before"), TEMPORAL_InstantBeforeInstant); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"equals"), TEMPORAL_InstantEqualsInstant); - - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"beforeInterval"), TEMPORAL_InstantBeforeInterval); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"afterInterval"), TEMPORAL_InstantAfterInterval); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"insideInterval"), TEMPORAL_InstantInsideInterval); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"hasBeginningInterval"), - TEMPORAL_InstantHasBeginningInterval); - SEARCH_FUNCTION_MAP.put(new URIImpl(TEMPORAL_NS+"hasEndInterval"), TEMPORAL_InstantHasEndInterval); - - } - - - - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java deleted file mode 100644 index c4e55be85..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java +++ /dev/null @@ -1,219 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.File; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; - -import com.beust.jcommander.internal.Maps; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; - -public class ExternalIndexMain { - - private static String userStr = ""; - private static String passStr = ""; - - private static String instStr = ""; - private static String zooStr = ""; - - private static String tablePrefix = ""; - - private static String AUTHS = ""; - - public static void main(String[] args) throws Exception { - Preconditions.checkArgument(args.length == 6, "java " + ExternalIndexMain.class.getCanonicalName() - + " sparqlFile cbinstance cbzk cbuser cbpassword rdfTablePrefix."); - - final String sparqlFile = args[0]; - - instStr = args[1]; - zooStr = args[2]; - userStr = args[3]; - passStr = args[4]; - tablePrefix = args[5]; - - String queryString = FileUtils.readFileToString(new File(sparqlFile)); - - - // Look for Extra Indexes - Instance inst = new ZooKeeperInstance(instStr, zooStr); - Connector c = inst.getConnector(userStr, passStr.getBytes()); - - System.out.println("Searching for Indexes"); - Map indexTables = Maps.newLinkedHashMap(); - for (String table : c.tableOperations().list()) { - if (table.startsWith(tablePrefix + "INDEX_")) { - Scanner s = c.createScanner(table, new Authorizations()); - s.setRange(Range.exact(new 
Text("~SPARQL"))); - for (Entry e : s) { - indexTables.put(table, e.getValue().toString()); - } - } - } - - List index = Lists.newArrayList(); - - if (indexTables.isEmpty()) { - System.out.println("No Index found"); - } else { - for (String table : indexTables.keySet()) { - String indexSparqlString = indexTables.get(table); - System.out.println("====================== INDEX FOUND ======================"); - System.out.println(" table : " + table); - System.out.println(" sparql : "); - System.out.println(indexSparqlString); - - index.add(new AccumuloIndexSet(indexSparqlString, c, table)); - } - } - - // Connect to Rya - Sail s = getRyaSail(); - SailRepository repo = new SailRepository(s); - repo.initialize(); - - // Perform Query - - CountingTupleQueryResultHandler count = new CountingTupleQueryResultHandler(); - - SailRepositoryConnection conn; - if (index.isEmpty()) { - conn = repo.getConnection(); - - } else { - ExternalProcessor processor = new ExternalProcessor(index); - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - smartSailRepo.initialize(); - - conn = smartSailRepo.getConnection(); - } - - startTime = System.currentTimeMillis(); - lastTime = startTime; - System.out.println("Query Started"); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(count); - - System.out.println("Count of Results found : " + count.i); - System.out.println("Total query time (s) : " + (System.currentTimeMillis() - startTime) / 1000.); - } - - static long lastTime = 0; - static long startTime = 0; - - private static class CountingTupleQueryResultHandler implements TupleQueryResultHandler { - public int i = 0; - - @Override - public void handleBoolean(boolean value) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List linkUrls) throws QueryResultHandlerException { - } - - @Override - public void startQueryResult(List bindingNames) throws TupleQueryResultHandlerException { - System.out.println("First Result Recieved (s) : " + (System.currentTimeMillis() - startTime) / 1000.); - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - i++; - if (i % 10 == 0) { - long mark = System.currentTimeMillis(); - System.out.println("Count : " + i + ". 
Time (s) : " + (mark - lastTime) / 1000.); - lastTime = mark; - } - - } - - } - - private static Configuration getConf() { - - Configuration conf = new Configuration(); - - conf.set(ConfigUtils.CLOUDBASE_USER, userStr); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, passStr); - - conf.set(ConfigUtils.CLOUDBASE_INSTANCE, instStr); - conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, zooStr); - - conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS); - conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, true); - return conf; - } - - private static Sail getRyaSail() throws AccumuloException, AccumuloSecurityException { - - Connector connector = ConfigUtils.getConnector(getConf()); - - final RdfCloudTripleStore store = new RdfCloudTripleStore(); - AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); - crdfdao.setConnector(connector); - - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(getConf()); - conf.setTablePrefix(tablePrefix); - crdfdao.setConf(conf); - store.setRyaDAO(crdfdao); - - return store; - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java deleted file mode 100644 index 2c6d92481..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java +++ /dev/null @@ -1,726 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.indexing.external.QueryVariableNormalizer.VarCollector; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; - -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -/** - * Processes a {@link TupleExpr} and replaces sets of elements in the tree with {@link ExternalTupleSet} objects. 
- */ -public class ExternalProcessor { - - private List indexSet; - - public ExternalProcessor(List indexSet) { - this.indexSet = indexSet; - } - - /** - * Iterates through the list of indexes and replaces all subtrees of the query that match an index with an external tuple object built from that index. - * - * @param query the query tree to rewrite - * @return the rewritten TupleExpr - */ - public TupleExpr process(TupleExpr query) { - TupleExpr rtn = query.clone(); - - - //move BindingSetAssignment Nodes out of the way - organizeBSAs(rtn); - - - // test to see if query contains no other nodes - // than filter, join, projection, and statement pattern and - // test whether query contains duplicate StatementPatterns and filters - if (isTupleValid(rtn)) { - - for (ExternalTupleSet index : indexSet) { - - // test to see if index contains at least one StatementPattern, - // that StatementPatterns are unique, - // and that all variables found in filters occur in some - // StatementPattern - if (isTupleValid(index.getTupleExpr())) { - - List normalize = getMatches(rtn, index.getTupleExpr()); - - for (TupleExpr tup : normalize) { - ExternalTupleSet eTup = (ExternalTupleSet) index.clone(); - setTableMap(tup, eTup); - setSupportedVarOrderMap(eTup); - eTup.setProjectionExpr((Projection) tup); - SPBubbleDownVisitor indexVisitor = new SPBubbleDownVisitor(eTup); - rtn.visit(indexVisitor); - FilterBubbleManager fbmv = new FilterBubbleManager(eTup); - rtn.visit(fbmv); - SubsetEqualsVisitor subIndexVis = new SubsetEqualsVisitor(eTup); - rtn.visit(subIndexVis); - - } - } - - } - - return rtn; - } else { - throw new IllegalArgumentException("Invalid Query."); - } - } - - - private void setTableMap(TupleExpr tupleMatch, ExternalTupleSet index) { - - List replacementVars = Lists.newArrayList(tupleMatch.getBindingNames()); - List tableVars = Lists.newArrayList(index.getTupleExpr().getBindingNames()); - - Map tableMap = Maps.newHashMap(); - - for(int i = 0; i < tableVars.size(); i++) { - tableMap.put(replacementVars.get(i), tableVars.get(i)); - } - index.setTableVarMap(tableMap); - - - } - - private void setSupportedVarOrderMap(ExternalTupleSet index) { - - Map> supportedVarOrders = Maps.newHashMap(); - BiMap biMap = HashBiMap.create(index.getTableVarMap()).inverse(); - Map> oldSupportedVarOrders = index.getSupportedVariableOrderMap(); - - Set temp = null; - Set keys = oldSupportedVarOrders.keySet(); - - for (String s : keys) { - temp = oldSupportedVarOrders.get(s); - Set newSet = Sets.newHashSet(); - - for (String t : temp) { - newSet.add(biMap.get(t)); - } - - String[] tempStrings = s.split("\u0000"); - String v = ""; - for(String u: tempStrings) { - if(v.length() == 0){ - v = v + biMap.get(u); - } else { - v = v + "\u0000" + biMap.get(u); - } - } - - supportedVarOrders.put(v, newSet); - - } - - index.setSupportedVariableOrderMap(supportedVarOrders); - - } - - - - private List getMatches(TupleExpr query, TupleExpr tuple) { - - try { - List list = QueryVariableNormalizer.getNormalizedIndex(query, tuple); - // System.out.println("Match list is " + list); - return list; - } catch (Exception e) { - System.out.println(e); - } - - return new ArrayList(); - - } - - // determines whether query is valid, which requires that a - // query must contain a StatementPattern, not contain duplicate - // Statement Patterns or Filters, be comprised of only Projection, - // Join, StatementPattern, and Filter nodes, and that any variable - // appearing in a Filter must appear in a StatementPattern. 
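- // For example (an illustrative query, not taken from this code), a tuple of the form - // SELECT ?e ?l { ?e <uri:talksTo> ?o . ?e <uri:label> ?l . FILTER(?l != "x") } - // passes this check, while a query containing an OPTIONAL or UNION clause fails it, - // since those parse to LeftJoin and Union nodes.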
- private static boolean isTupleValid(QueryModelNode node) { - - ValidQueryVisitor vqv = new ValidQueryVisitor(); - node.visit(vqv); - - Set spVars = getVarNames(getQNodes("sp", node)); - - if (vqv.isValid() && (spVars.size() > 0)) { - - FilterCollector fvis = new FilterCollector(); - node.visit(fvis); - List fList = fvis.getFilters(); - return (fList.size() == Sets.newHashSet(fList).size() && getVarNames(fList).size() <= spVars.size()); - - } else { - return false; - } - } - - private static Set getQNodes(QueryModelNode queryNode) { - Set rtns = new HashSet(); - - StatementPatternCollector spc = new StatementPatternCollector(); - queryNode.visit(spc); - rtns.addAll(spc.getStatementPatterns()); - - FilterCollector fvis = new FilterCollector(); - queryNode.visit(fvis); - rtns.addAll(fvis.getFilters()); - - ExternalTupleCollector eVis = new ExternalTupleCollector(); - queryNode.visit(eVis); - rtns.addAll(eVis.getExtTup()); - - return rtns; - } - - private static Set getQNodes(String node, QueryModelNode queryNode) { - - if (node.equals("sp")) { - Set eSet = new HashSet(); - StatementPatternCollector spc = new StatementPatternCollector(); - queryNode.visit(spc); - List spList = spc.getStatementPatterns(); - eSet.addAll(spList); - // returns empty set if list contains duplicate StatementPatterns - if (spList.size() > eSet.size()) { - return Sets.newHashSet(); - } else { - return eSet; - } - } else if (node.equals("filter")) { - - FilterCollector fvis = new FilterCollector(); - queryNode.visit(fvis); - - return Sets.newHashSet(fvis.getFilters()); - } else { - - throw new IllegalArgumentException("Invalid node type."); - } - } - - // moves StatementPatterns in query that also occur in index to bottom of - // query tree. - private static class SPBubbleDownVisitor extends QueryModelVisitorBase { - - private TupleExpr tuple; - private QueryModelNode indexQNode; - private Set sSet = Sets.newHashSet(); - - public SPBubbleDownVisitor(ExternalTupleSet index) { - - this.tuple = index.getTupleExpr(); - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes("sp", indexQNode); - - } - - public void meet(Projection node) { - // moves external tuples above statement patterns before attempting - // to bubble down index statement patterns found in query tree - - organizeExtTuples(node); - - super.meet(node); - } - - public void meet(Join node) { - // if right node contained in index, move it to bottom of query tree - if (sSet.contains(node.getRightArg())) { - - Set eSet = getQNodes("sp", node); - Set compSet = Sets.difference(eSet, sSet); - - if (eSet.containsAll(sSet)) { - - QNodeExchanger qne = new QNodeExchanger(node.getRightArg(), compSet); - node.visit(qne); - node.replaceChildNode(node.getRightArg(), qne.getReplaced()); - - super.meet(node); - } - return; - } - // if left node contained in index, move it to bottom of query tree - else if (sSet.contains(node.getLeftArg())) { - - Set eSet = getQNodes("sp", node); - Set compSet = Sets.difference(eSet, sSet); - - if (eSet.containsAll(sSet)) { - - QNodeExchanger qne = new QNodeExchanger(node.getLeftArg(), compSet); - node.visit(qne); - node.replaceChildNode(node.getLeftArg(), qne.getReplaced()); - - super.meet(node); - } - return; - - } else { - super.meet(node); - } - - } - - // moves all ExternalTupleSets in query tree above remaining - // StatementPatterns - private static void organizeExtTuples(QueryModelNode node) { - - ExternalTupleCollector eVis = new ExternalTupleCollector(); - node.visit(eVis); - - ExtTupleExchangeVisitor oev = new 
ExtTupleExchangeVisitor(eVis.getExtTup()); - node.visit(oev); - } - - } - - // given a replacement QueryModelNode and compSet, this visitor replaces the - // first - // element in the query tree that occurs in compSet with replacement and - // returns - // the element that was replaced. - private static class QNodeExchanger extends QueryModelVisitorBase { - - private QueryModelNode toBeReplaced; - private QueryModelNode replacement; - private Set compSet; - - public QNodeExchanger(QueryModelNode replacement, Set compSet) { - this.replacement = replacement; - this.toBeReplaced = replacement; - this.compSet = compSet; - } - - public QueryModelNode getReplaced() { - return toBeReplaced; - } - - public void meet(Join node) { - - if (compSet.contains(node.getRightArg())) { - this.toBeReplaced = node.getRightArg(); - node.replaceChildNode(node.getRightArg(), replacement); - return; - } else if (compSet.contains(node.getLeftArg())) { - this.toBeReplaced = node.getLeftArg(); - node.replaceChildNode(node.getLeftArg(), replacement); - return; - } else { - super.meet(node); - } - - } - - } - - // moves filter that occurs in both query and index down the query tree so - // that that it is positioned - // above statement patterns associated with index. Precondition for calling - // this method is that - // SPBubbleDownVisitor has been called to position index StatementPatterns - // within query tree. - //TODO this visitor assumes that all filters are positioned at top of query tree - //could lead to problems if filter optimizer called before external processor - private static class FilterBubbleDownVisitor extends QueryModelVisitorBase { - - private QueryModelNode filter; - private Set compSet; - private boolean filterPlaced = false; - - public FilterBubbleDownVisitor(QueryModelNode filter, Set compSet) { - this.filter = filter; - this.compSet = compSet; - - } - - public boolean filterPlaced() { - return filterPlaced; - } - - public void meet(Join node) { - - if (!compSet.contains(node.getRightArg())) { - // looks for placed to position filter node. if right node is - // contained in index - // and left node is statement pattern node contained in index or - // is a join, place - // filter above join. 
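- // (E.g., for a hypothetical Join(A, C) where the right arg C is contained in the index and - // A is a statement pattern outside it, the filter is placed directly above C; if A were - // itself a Join or also contained in the index, the filter would instead be placed above - // the whole Join node.)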
- if (node.getLeftArg() instanceof Join || !(compSet.contains(node.getLeftArg()))) { - - QueryModelNode pNode = node.getParentNode(); - ((Filter) filter).setArg(node); - pNode.replaceChildNode(node, filter); - filterPlaced = true; - - return; - } // otherwise place filter below join and above right arg - else { - ((Filter) filter).setArg(node.getRightArg()); - node.replaceChildNode(node.getRightArg(), filter); - filterPlaced = true; - return; - - } - } else if ((node.getLeftArg() instanceof StatementPattern) && !compSet.contains(node.getLeftArg())) { - - ((Filter) filter).setArg(node.getLeftArg()); - node.replaceChildNode(node.getLeftArg(), filter); - filterPlaced = true; - - return; - } else { - super.meet(node); - } - } - - } - - private static Set getVarNames(Collection nodes) { - - List tempVars; - Set nodeVarNames = Sets.newHashSet(); - - for (QueryModelNode s : nodes) { - tempVars = VarCollector.process(s); - for (String t : tempVars) - nodeVarNames.add(t); - } - return nodeVarNames; - - } - - // visitor which determines whether or not to reposition a filter by calling - // FilterBubbleDownVisitor - private static class FilterBubbleManager extends QueryModelVisitorBase { - - private TupleExpr tuple; - private QueryModelNode indexQNode; - private Set sSet = Sets.newHashSet(); - private Set bubbledFilters = Sets.newHashSet(); - - public FilterBubbleManager(ExternalTupleSet index) { - this.tuple = index.getTupleExpr(); - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes(indexQNode); - - } - - public void meet(Filter node) { - - Set eSet = getQNodes(node); - Set compSet = Sets.difference(eSet, sSet); - - // if index contains filter node and it hasn't already been moved, - // move it down - // query tree just above position of statement pattern nodes found - // in both query tree - // and index (assuming that SPBubbleDownVisitor has already been - // called) - if (sSet.contains(node.getCondition()) && !bubbledFilters.contains(node.getCondition())) { - FilterBubbleDownVisitor fbdv = new FilterBubbleDownVisitor((Filter) node.clone(), compSet); - node.visit(fbdv); - bubbledFilters.add(node.getCondition()); - // checks if filter correctly placed, and if it has been, - // removes old copy of filter - if (fbdv.filterPlaced()) { - - QueryModelNode pNode = node.getParentNode(); - TupleExpr cNode = node.getArg(); - pNode.replaceChildNode(node, cNode); - - - super.meetNode(pNode); - } - super.meet(node); - - } else { - super.meet(node); - } - } - } - - // iterates through the query tree and attempts to match subtrees with - // index. When a match is - // found, the subtree is replaced by an ExternalTupleSet formed from the - // index. Pre-condition for - // calling this method is that both SPBubbleDownVisitor and - // FilterBubbleManager have been called - // to position the StatementPatterns and Filters. 
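- // A minimal sketch of the full matching pass that process() runs for each normalized - // index match, where eTup is the matched ExternalTupleSet: - // rtn.visit(new SPBubbleDownVisitor(eTup)); // group index statement patterns together - // rtn.visit(new FilterBubbleManager(eTup)); // pull matching filters down to them - // rtn.visit(new SubsetEqualsVisitor(eTup)); // swap the matched subtree for eTup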
- private static class SubsetEqualsVisitor extends QueryModelVisitorBase { - - private TupleExpr tuple; - private QueryModelNode indexQNode; - private ExternalTupleSet set; - private Set sSet = Sets.newHashSet(); - - public SubsetEqualsVisitor(ExternalTupleSet index) { - this.tuple = index.getTupleExpr(); - this.set = index; - indexQNode = ((Projection) tuple).getArg(); - sSet = getQNodes(indexQNode); - - } - - public void meet(Join node) { - - Set eSet = getQNodes(node); - - if (eSet.containsAll(sSet) && !(node.getRightArg() instanceof BindingSetAssignment)) { - -// System.out.println("Eset is " + eSet + " and sSet is " + sSet); - - if (eSet.equals(sSet)) { - node.replaceWith(set); - return; - } else { - if (node.getLeftArg() instanceof StatementPattern && sSet.size() == 1) { - if(sSet.contains(node.getLeftArg())) { - node.setLeftArg(set); - } else if(sSet.contains(node.getRightArg())) { - node.setRightArg(set); - } else { - return; - } - } - else { - super.meet(node); - } - } - } else if (eSet.containsAll(sSet)) { - - super.meet(node); - - } - - } - //TODO might need to include BindingSetAssignment Condition here - //to account for index consisting of only filter and BindingSetAssignment nodes - public void meet(Filter node) { - - Set eSet = getQNodes(node); - - if (eSet.containsAll(sSet)) { - - if (eSet.equals(sSet)) { - node.replaceWith(set); - return; - } else { - super.meet(node); - } - } - } - } - - // visitor which determines whether a query is valid (i.e. it does not - // contain nodes other than - // Projection, Join, Filter, StatementPattern ) - private static class ValidQueryVisitor extends QueryModelVisitorBase { - - private boolean isValid = true; - - public boolean isValid() { - return isValid; - } - - public void meet(Projection node) { - node.getArg().visit(this); - } - - public void meet(Filter node) { - node.getArg().visit(this); - } - - - - - - public void meetNode(QueryModelNode node) { - - if (!((node instanceof Join) || (node instanceof StatementPattern) || (node instanceof BindingSetAssignment) || (node instanceof Var))) { - isValid = false; - return; - - } else{ - super.meetNode(node); - } - } - - } - - // repositions ExternalTuples above StatementPatterns within query tree - private static class ExtTupleExchangeVisitor extends QueryModelVisitorBase { - - private Set extTuples; - - public ExtTupleExchangeVisitor(Set extTuples) { - this.extTuples = extTuples; - } - - public void meet(Join queryNode) { - - // if query tree contains external tuples and they are not - // positioned above statement pattern node - // reposition - if (this.extTuples.size() > 0 && !(queryNode.getRightArg() instanceof ExternalTupleSet) - && !(queryNode.getRightArg() instanceof BindingSetAssignment)) { - QNodeExchanger qnev = new QNodeExchanger((QueryModelNode) queryNode.getRightArg(), this.extTuples); - queryNode.visit(qnev); - queryNode.setRightArg((TupleExpr)qnev.getReplaced()); - super.meet(queryNode); - } else { - super.meet(queryNode); - } - - } - - } - - private static class ExternalTupleCollector extends QueryModelVisitorBase { - - private Set eSet = new HashSet(); - - @Override - public void meetNode(QueryModelNode node) throws RuntimeException { - if (node instanceof ExternalTupleSet) { - eSet.add(node); - } - super.meetNode(node); - } - - public Set getExtTup() { - return eSet; - } - - } - - private static class FilterCollector extends QueryModelVisitorBase { - - private List filterList = Lists.newArrayList(); - - public List getFilters() { - return filterList; - } - - @Override - 
public void meet(Filter node) { - filterList.add(node.getCondition()); - super.meet(node); - } - - } - - private static void organizeBSAs(QueryModelNode node) { - - BindingSetAssignmentCollector bsac = new BindingSetAssignmentCollector(); - node.visit(bsac); - - if (bsac.containsBSAs()) { - Set bsaSet = bsac.getBindingSetAssignments(); - BindingSetAssignmentExchangeVisitor bsaev = new BindingSetAssignmentExchangeVisitor(bsaSet); - node.visit(bsaev); - } - } - - // repositions ExternalTuples above StatementPatterns within query tree - private static class BindingSetAssignmentExchangeVisitor extends QueryModelVisitorBase { - - private Set bsas; - - public BindingSetAssignmentExchangeVisitor(Set bsas) { - this.bsas = bsas; - } - - public void meet(Join queryNode) { - - // if query tree contains external tuples and they are not - // positioned above statement pattern node - // reposition - if (this.bsas.size() > 0 && !(queryNode.getRightArg() instanceof BindingSetAssignment)) { - QNodeExchanger qnev = new QNodeExchanger((QueryModelNode) queryNode.getRightArg(), bsas); - queryNode.visit(qnev); - queryNode.replaceChildNode(queryNode.getRightArg(), qnev.getReplaced()); - super.meet(queryNode); - } else { - super.meet(queryNode); - } - - } - - } - - - public static class BindingSetAssignmentCollector extends QueryModelVisitorBase { - - private Set bindingSetList = Sets.newHashSet(); - - public Set getBindingSetAssignments() { - return bindingSetList; - } - - public boolean containsBSAs() { - return (bindingSetList.size() > 0); - } - - @Override - public void meet(BindingSetAssignment node) { - bindingSetList.add(node); - super.meet(node); - } - - } - - // TODO insert BindingSetAssignments at bottom of query tree --this approach assumes - // BindingSetAssignments always removed during creation of ExternalTupleSets within - // query. There may be cases where this precondition does not hold (all BindingSetAssignments - // not removed). For now assuming it always holds. - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java deleted file mode 100644 index 772ffa40d..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java +++ /dev/null @@ -1,86 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import info.aduna.iteration.CloseableIteration; - -import org.openrdf.model.ValueFactory; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.sail.Sail; -import org.openrdf.sail.SailConnection; -import org.openrdf.sail.SailException; -import org.openrdf.sail.helpers.SailBase; -import org.openrdf.sail.helpers.SailConnectionWrapper; - -public class ExternalSail extends SailBase { - private final Sail s; - private final ExternalProcessor processor; - - public ExternalSail(Sail s, ExternalProcessor processor) { - this.s = s; - this.processor = processor; - } - - @Override - protected SailConnection getConnectionInternal() throws SailException { - return new ProcessingSailConnection(); - } - - @Override - public boolean isWritable() throws SailException { - return s.isWritable(); - } - - @Override - public ValueFactory getValueFactory() { - return s.getValueFactory(); - } - - @Override - protected void shutDownInternal() throws SailException { - s.shutDown(); - } - - private class ProcessingSailConnection extends SailConnectionWrapper { - - public ProcessingSailConnection() throws SailException { - super(s.getConnection()); - } - - @Override - public CloseableIteration evaluate(TupleExpr tupleExpr, Dataset dataset, - BindingSet bindings, boolean includeInferred) throws SailException { - if ((tupleExpr instanceof Projection) || (tupleExpr instanceof QueryRoot)) { - TupleExpr processedExpression = processor.process(tupleExpr); - return super.evaluate(processedExpression, dataset, bindings, includeInferred); - } else { - return super.evaluate(tupleExpr, dataset, bindings, includeInferred); - } - - } - } -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java deleted file mode 100644 index 082dd99cc..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java +++ /dev/null @@ -1,124 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.List; - -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.openrdf.model.URI; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.algebra.helpers.QueryModelTreePrinter; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; -import org.openrdf.sail.memory.MemoryStore; - -import com.google.common.collect.Lists; - -public class ExternalSailExample { - - public static void main(String[] args) throws Exception { - - Sail s = new MemoryStore(); - SailRepository repo = new SailRepository(s); - repo.initialize(); - SailRepositoryConnection conn = repo.getConnection(); - - URI sub = new URIImpl("uri:entity"); - URI subclass = new URIImpl("uri:class"); - URI obj = new URIImpl("uri:obj"); - URI talksTo = new URIImpl("uri:talksTo"); - - conn.add(sub, RDF.TYPE, subclass); - conn.add(sub, RDFS.LABEL, new LiteralImpl("label")); - conn.add(sub, talksTo, obj); - - URI sub2 = new URIImpl("uri:entity2"); - URI subclass2 = new URIImpl("uri:class2"); - URI obj2 = new URIImpl("uri:obj2"); - - conn.add(sub2, RDF.TYPE, subclass2); - conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub2, talksTo, obj2); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(new SPARQLResultsXMLWriter(System.out)); - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(indexSparqlString, null); - System.out.println(pq); - - List index = Lists.newArrayList(); - - Connector accCon = new MockInstance().getConnector("root", "".getBytes()); - String tablename = "table"; - accCon.tableOperations().create(tablename); - index.add(new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename)); - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + "}";// - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(new SPARQLResultsXMLWriter(System.out)); - - pq = sp.parseQuery(queryString, null); - QueryModelTreePrinter mp = new QueryModelTreePrinter(); - pq.getTupleExpr().visit(mp); - System.out.println(mp.getTreeString()); - System.out.println(pq.getTupleExpr()); - - System.out.println("++++++++++++"); - ExternalProcessor processor = new ExternalProcessor(index); - System.out.println(processor.process(pq.getTupleExpr())); - - System.out.println("----------------"); - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - smartSailRepo.initialize(); - - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(new SPARQLResultsXMLWriter(System.out)); - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java deleted file mode 100644 index 65a775fde..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java +++ /dev/null @@ -1,773 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.IndexPlanValidator.IndexPlanValidator; -import mvm.rya.indexing.IndexPlanValidator.IndexedExecutionPlanGenerator; -import mvm.rya.indexing.IndexPlanValidator.ThreshholdPlanSelector; -import mvm.rya.indexing.IndexPlanValidator.TupleReArranger; -import mvm.rya.indexing.IndexPlanValidator.ValidIndexCombinationGenerator; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.external.QueryVariableNormalizer.VarCollector; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.BindingSetAssignment; -import org.openrdf.query.algebra.Difference; -import org.openrdf.query.algebra.Distinct; -import org.openrdf.query.algebra.EmptySet; -import org.openrdf.query.algebra.Extension; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Intersection; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.LeftJoin; -import org.openrdf.query.algebra.Order; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.algebra.Reduced; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.UnaryTupleOperator; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.VarNameCollector; -import org.openrdf.sail.SailException; - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -//optimizer which matches TupleExpressions associated with pre-computed queries -//to sub-queries of a given query. 
Each matched sub-query is replaced by an indexing node -//to delegate that portion of the query to the pre-computed query index -public class PrecompJoinOptimizer implements QueryOptimizer, Configurable { - - private List indexSet; - private Configuration conf; - private boolean init = false; - - public PrecompJoinOptimizer() { - } - - public PrecompJoinOptimizer(Configuration conf) { - this.conf = conf; - try { - indexSet = getAccIndices(conf); - init = true; - } catch (MalformedQueryException e) { - e.printStackTrace(); - } catch (SailException e) { - e.printStackTrace(); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - } - - public PrecompJoinOptimizer(List indices, boolean useOptimalPcj) { - this.indexSet = indices; - conf = new Configuration(); - conf.setBoolean(ConfigUtils.USE_OPTIMAL_PCJ, useOptimalPcj); - } - - public void setConf(Configuration conf) { - this.conf = conf; - if (!init) { - try { - indexSet = getAccIndices(conf); - init = true; - } catch (MalformedQueryException e) { - e.printStackTrace(); - } catch (SailException e) { - e.printStackTrace(); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - } - } - - @Override - public Configuration getConf() { - return conf; - } - - - @Override - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(tupleExpr, indexSet); - JoinVisitor jv = new JoinVisitor(); - - if (ConfigUtils.getUseOptimalPCJ(conf) && indexSet.size() > 0) { - - //get potential relevant index combinations - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(tupleExpr); - Iterator> iter = vic.getValidIndexCombos(iep.getNormalizedIndices()); - TupleExpr bestTup = null; - TupleExpr tempTup = null; - double tempCost = 0; - double minCost = Double.MAX_VALUE; - - while (iter.hasNext()) { - //apply join visitor to place external index nodes in query - TupleExpr clone = tupleExpr.clone(); - jv.setExternalTupList(iter.next()); - jv.setSegmentFilters(new ArrayList()); - clone.visit(jv); - - //get all valid execution plans for given external index combination by considering all - //permutations of nodes in TupleExpr - IndexPlanValidator ipv = new IndexPlanValidator(false); - Iterator validTups = ipv.getValidTuples(TupleReArranger.getTupleReOrderings(clone).iterator()); - - //set valid plan according to a specified cost threshold, where cost depends on specified weights - //for number of external index nodes, common variables among joins in execution plan, and number of - //external products in execution plan - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(tupleExpr); - tempTup = tps.getThreshholdQueryPlan(validTups, .4, .5, .2, .3); - - //choose best threshhold TupleExpr among all index node combinations - tempCost = tps.getCost(tempTup, .5, .2, .3); - if(tempCost < minCost ) { - minCost = tempCost; - bestTup = tempTup; - } - } - if (bestTup != null) { - ((UnaryTupleOperator) tupleExpr).setArg(((UnaryTupleOperator) bestTup).getArg()); - } - return; - } else { - if (indexSet.size() > 0) { - jv.setExternalTupList(iep.getNormalizedIndices()); - 
tupleExpr.visit(jv); - } - return; - } - } - - protected class JoinVisitor extends QueryModelVisitorBase { - - private List tupList; - private List segmentFilters = Lists.newArrayList(); - - public void setExternalTupList(List tupList) { - this.tupList = tupList; - } - - public void setSegmentFilters(List segmentFilters) { - this.segmentFilters = segmentFilters; - } - - @Override - public void meet(Join node) { - - //get all filters with bindings in this segment - updateFilters(segmentFilters, true); - - try { - if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) { - return; - } - - //get nodes in this join segment - TupleExpr newJoin = null; - List args = getJoinArgs(node, new ArrayList(), false); - List joinArgs = Lists.newArrayList(); - - for (QueryModelNode qNode : args) { - assert (qNode instanceof TupleExpr); - joinArgs.add((TupleExpr) qNode); - } - - //insert all matching ExternalTupleSets in tupList into this segment - joinArgs = matchExternalTupleSets(joinArgs, tupList); - - //push down any filters that have bindings in lower segments - //and update the filters in this segment - updateFilters(segmentFilters, false); - - //form join from matching ExternalTupleSets, remaining nodes, and filters - //that can't be pushed down any further - newJoin = getNewJoin(joinArgs, getFilterChain(segmentFilters)); - - // Replace old join hierarchy - node.replaceWith(newJoin); - - //visit remaining nodes to match ExternalTupleSets with nodes further down - for (TupleExpr te : joinArgs) { - if (!(te instanceof StatementPattern) && !(te instanceof ExternalTupleSet)) { - segmentFilters = Lists.newArrayList(); - te.visit(this); - } - } - - } catch (Exception e) { - e.printStackTrace(); - } - } - - - @Override - public void meet(Filter node) { - segmentFilters.add(node); - node.getArg().visit(this); - } - - //chain filters together and return front and back of chain - private List getFilterChain(List filters) { - List filterTopBottom = Lists.newArrayList(); - Filter filterChainTop = null; - Filter filterChainBottom = null; - - for (Filter filter: filters) { - if (filterChainTop == null) { - filterChainTop = filter; - } else if (filterChainBottom == null) { - filterChainBottom = filter; - filterChainTop.setArg(filterChainBottom); - } else { - filterChainBottom.setArg(filter); - filterChainBottom = filter; - } - } - if(filterChainTop != null) { - filterTopBottom.add(filterChainTop); - } - if(filterChainBottom != null) { - filterTopBottom.add(filterChainBottom); - } - return filterTopBottom; - } - - //build newJoin node given remaining joinArgs and chain of filters - private TupleExpr getNewJoin(List args, List filterChain) { - TupleExpr newJoin; - List joinArgs = Lists.newArrayList(args); - - if (joinArgs.size() > 1) { - if (filterChain.size() > 0) { - TupleExpr finalJoinArg = joinArgs.remove(0); - TupleExpr tempJoin; - TupleExpr temp = filterChain.get(0); - - if (joinArgs.size() > 1) { - tempJoin = new Join(joinArgs.remove(0), joinArgs.remove(0)); - for (TupleExpr te : joinArgs) { - tempJoin = new Join(tempJoin, te); - } - } else { - tempJoin = joinArgs.remove(0); - } - - if (filterChain.size() == 1) { - ((Filter) temp).setArg(tempJoin); - } else { - ((Filter) filterChain.get(1)).setArg(tempJoin); - } - newJoin = new Join(temp, finalJoinArg); - } else { - newJoin = new Join(joinArgs.get(0), joinArgs.get(1)); - joinArgs.remove(0); - joinArgs.remove(0); - - for (TupleExpr te : joinArgs) { - newJoin = new Join(newJoin, te); - } - } - } else if 
(joinArgs.size() == 1) { - if (filterChain.size() > 0) { - newJoin = filterChain.get(0); - if (filterChain.size() == 1) { - ((Filter) newJoin).setArg(joinArgs.get(0)); - } else { - ((Filter) filterChain.get(1)).setArg(joinArgs.get(0)); - } - } else { - newJoin = joinArgs.get(0); - } - } else { - throw new IllegalStateException("JoinArgs size cannot be zero."); - } - return newJoin; - } - - - private List matchExternalTupleSets(List joinArgs, List tupList) { - - Set argSet = Sets.newHashSet(); - argSet.addAll(joinArgs); - - if(argSet.size() < joinArgs.size()) { - throw new IllegalArgumentException("Query has duplicate nodes in segment!"); - } - - Set firstJoinFilterCond = Sets.newHashSet(); - - for(Filter filter: segmentFilters) { - firstJoinFilterCond.add(filter.getCondition()); - } - - argSet.addAll(firstJoinFilterCond); - - //see if ExternalTupleSet nodes are a subset of joinArgs, and if so, replacing matching nodes - //with ExternalTupleSet - for (ExternalTupleSet tup : tupList) { - TupleExpr tupleArg = tup.getTupleExpr(); - if (isTupleValid(tupleArg)) { - List tupJoinArgs = getJoinArgs(tupleArg, - new ArrayList(), true); - Set tupJoinArgSet = Sets.newHashSet(tupJoinArgs); - if(tupJoinArgSet.size() < tupJoinArgs.size()) { - throw new IllegalArgumentException("ExternalTuple contains duplicate nodes!"); - } - if (argSet.containsAll(tupJoinArgSet)) { - argSet = Sets.newHashSet(Sets.difference(argSet, tupJoinArgSet)); - argSet.add((ExternalTupleSet) tup.clone()); - } - } - } - - //update segment filters by removing those use in ExternalTupleSet - Iterator iter = segmentFilters.iterator(); - - while(iter.hasNext()) { - Filter filt = iter.next(); - if(!argSet.contains(filt.getCondition())) { - filt.replaceWith(filt.getArg()); - iter.remove(); - } - } - - //update joinArgs - joinArgs = Lists.newArrayList(); - for(QueryModelNode node: argSet) { - if(!(node instanceof ValueExpr)) { - joinArgs.add((TupleExpr)node); - } - } - - return joinArgs; - } - - - private void updateFilters(List filters, boolean firstJoin) { - - Iterator iter = segmentFilters.iterator(); - - while (iter.hasNext()) { - if (!FilterRelocator.relocate(iter.next(), firstJoin)) { - iter.remove(); - } - } - } - - protected List getJoinArgs(TupleExpr tupleExpr, List joinArgs, boolean getFilters) { - if (tupleExpr instanceof Join) { - if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) - && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs, getFilters); - getJoinArgs(join.getRightArg(), joinArgs, getFilters); - } - } else if(tupleExpr instanceof Filter) { - if (getFilters) { - joinArgs.add(((Filter) tupleExpr).getCondition()); - } - getJoinArgs(((Filter)tupleExpr).getArg(), joinArgs, getFilters); - } else if(tupleExpr instanceof Projection) { - getJoinArgs(((Projection)tupleExpr).getArg(), joinArgs, getFilters); - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - } - - protected static class FilterRelocator extends QueryModelVisitorBase { - - - protected final Filter filter; - - protected final Set filterVars; - private boolean stopAtFirstJoin = false; - private boolean isFirstJoinFilter = false; - private boolean inSegment = true; - - - public FilterRelocator(Filter filter) { - this.filter = filter; - filterVars = VarNameCollector.process(filter.getCondition()); - } - - public FilterRelocator(Filter filter, boolean stopAtFirstJoin) { - this.filter = filter; - filterVars = 
VarNameCollector.process(filter.getCondition()); - this.stopAtFirstJoin = stopAtFirstJoin; - } - - public static boolean relocate(Filter filter) { - FilterRelocator fr = new FilterRelocator(filter); - filter.visit(fr); - return fr.inSegment; - } - - public static boolean relocate(Filter filter, boolean stopAtFirstJoin) { - if (stopAtFirstJoin) { - FilterRelocator fr = new FilterRelocator(filter, stopAtFirstJoin); - filter.visit(fr); - return fr.isFirstJoinFilter; - } else { - FilterRelocator fr = new FilterRelocator(filter); - filter.visit(fr); - return fr.inSegment; - } - } - - - @Override - protected void meetNode(QueryModelNode node) { - // By default, do not traverse - assert node instanceof TupleExpr; - - if(node instanceof UnaryTupleOperator) { - if (((UnaryTupleOperator)node).getArg().getBindingNames().containsAll(filterVars)) { - if (stopAtFirstJoin) { - ((UnaryTupleOperator) node).getArg().visit(this); - } else { - inSegment = false; - relocate(filter, ((UnaryTupleOperator) node).getArg()); - } - } - } - - relocate(filter, (TupleExpr) node); - } - - - @Override - public void meet(Join join) { - - if (stopAtFirstJoin) { - isFirstJoinFilter = true; - relocate(filter, join); - } else { - - if (join.getLeftArg().getBindingNames().containsAll(filterVars)) { - // All required vars are bound by the left expr - join.getLeftArg().visit(this); - } else if (join.getRightArg().getBindingNames().containsAll(filterVars)) { - // All required vars are bound by the right expr - join.getRightArg().visit(this); - } else { - relocate(filter, join); - } - } - } - - @Override - public void meet(LeftJoin leftJoin) { - - if (leftJoin.getLeftArg().getBindingNames().containsAll(filterVars)) { - inSegment = false; - if (stopAtFirstJoin) { - leftJoin.getLeftArg().visit(this); - } else { - relocate(filter, leftJoin.getLeftArg()); - } - } - else { - relocate(filter, leftJoin); - } - } - - @Override - public void meet(Union union) { - Filter clone = new Filter(); - clone.setCondition(filter.getCondition().clone()); - - relocate(filter, union.getLeftArg()); - relocate(clone, union.getRightArg()); - - inSegment = false; - - } - - @Override - public void meet(Difference node) { - Filter clone = new Filter(); - clone.setCondition(filter.getCondition().clone()); - - relocate(filter, node.getLeftArg()); - relocate(clone, node.getRightArg()); - - inSegment = false; - - } - - @Override - public void meet(Intersection node) { - Filter clone = new Filter(); - clone.setCondition(filter.getCondition().clone()); - - relocate(filter, node.getLeftArg()); - relocate(clone, node.getRightArg()); - - inSegment = false; - - } - - @Override - public void meet(Extension node) { - if (node.getArg().getBindingNames().containsAll(filterVars)) { - if (stopAtFirstJoin) { - node.getArg().visit(this); - } else { - relocate(filter, node.getArg()); - inSegment = false; - } - } - else { - relocate(filter, node); - } - } - - @Override - public void meet(EmptySet node) { - if (filter.getParentNode() != null) { - // Remove filter from its original location - filter.replaceWith(filter.getArg()); - } - } - - @Override - public void meet(Filter filter) { - // Filters are commutative - filter.getArg().visit(this); - } - - @Override - public void meet(Distinct node) { - node.getArg().visit(this); - } - - @Override - public void meet(Order node) { - node.getArg().visit(this); - } - - @Override - public void meet(QueryRoot node) { - node.getArg().visit(this); - } - - @Override - public void meet(Reduced node) { - node.getArg().visit(this); - } - - 
protected void relocate(Filter filter, TupleExpr newFilterArg) { - if (filter.getArg() != newFilterArg) { - if (filter.getParentNode() != null) { - // Remove filter from its original location - filter.replaceWith(filter.getArg()); - } - - // Insert filter at the new location - newFilterArg.replaceWith(filter); - filter.setArg(newFilterArg); - } - } - } - - - private static boolean isTupleValid(QueryModelNode node) { - - ValidQueryVisitor vqv = new ValidQueryVisitor(); - node.visit(vqv); - - if (vqv.isValid() && vqv.getSPs().size() > 1) { - if(vqv.getFilters().size() > 0) { - Set spVars = getVarNames(vqv.getSPs()); - Set fVarNames = getVarNames(vqv.getFilters()); - //check that all vars contained in filters also occur in SPs - return Sets.intersection(fVarNames,spVars).equals(fVarNames); - } else { - return true; - } - } else { - return false; - } - } - - - private static Set getVarNames(Collection nodes) { - - List tempVars; - Set nodeVarNames = Sets.newHashSet(); - - for (QueryModelNode s : nodes) { - tempVars = VarCollector.process(s); - for (String t : tempVars) - nodeVarNames.add(t); - } - return nodeVarNames; - } - - - private static class ValidQueryVisitor extends QueryModelVisitorBase { - - private boolean isValid = true; - private Set filterSet = Sets.newHashSet(); - private Set spSet = Sets.newHashSet(); - - public Set getFilters() { - return filterSet; - } - - public Set getSPs() { - return spSet; - } - - public boolean isValid() { - return isValid; - } - - public void meet(Projection node) { - node.getArg().visit(this); - } - - @Override - public void meet(Filter node) { - filterSet.add(node.getCondition()); - node.getArg().visit(this); - } - - @Override - public void meet(StatementPattern node) { - spSet.add(node); - } - - public void meetNode(QueryModelNode node) { - - if (!((node instanceof Join) || (node instanceof StatementPattern) || (node instanceof BindingSetAssignment) || - (node instanceof Var) || (node instanceof Union) || (node instanceof LeftJoin))) { - isValid = false; - return; - - } else{ - super.meetNode(node); - } - } - - } - - - private static List getAccIndices(Configuration conf) throws MalformedQueryException, - SailException, QueryEvaluationException, TableNotFoundException, AccumuloException, - AccumuloSecurityException { - - List tables = null; - - if (conf instanceof RdfCloudTripleStoreConfiguration) { - tables = ((RdfCloudTripleStoreConfiguration) conf).getPcjTables(); - } - - String tablePrefix = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX); - Connector c = ConfigUtils.getConnector(conf); - Map indexTables = Maps.newLinkedHashMap(); - - if (tables != null && !tables.isEmpty()) { - for (String table : tables) { - Scanner s = c.createScanner(table, new Authorizations()); - s.setRange(Range.exact(new Text("~SPARQL"))); - for (Entry e : s) { - indexTables.put(table, e.getValue().toString()); - } - } - } else { - for (String table : c.tableOperations().list()) { - if (table.startsWith(tablePrefix + "INDEX")) { - Scanner s = c.createScanner(table, new Authorizations()); - s.setRange(Range.exact(new Text("~SPARQL"))); - for (Entry e : s) { - indexTables.put(table, e.getValue().toString()); - } - } - } - - } - List index = Lists.newArrayList(); - - if (indexTables.isEmpty()) { - System.out.println("No Index found"); - } else { - for (String table : indexTables.keySet()) { - String indexSparqlString = indexTables.get(table); - index.add(new AccumuloIndexSet(indexSparqlString, c, table)); - } - } - return index; - } -} diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java deleted file mode 100644 index d19c51141..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java +++ /dev/null @@ -1,1180 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - -import org.openrdf.model.Literal; -import org.openrdf.model.Value; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.NAryValueOperator; -import org.openrdf.query.algebra.ProjectionElem; -import org.openrdf.query.algebra.ProjectionElemList; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.ValueConstant; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -public class QueryVariableNormalizer { - - - /** - * @param tuple1 - * tuple expression from a parsed query - * @param tuple2 - * tuple expression from a parsed query (the proposed index whose - * variables are to be relabeled) - * @return list of all possible tuples obtained by substituting the - * variables of proposed index with the variables from query - * @throws Exception - * @throws IllegalArgumentException - */ - public static List getNormalizedIndex(TupleExpr tuple1, TupleExpr tuple2) throws Exception { - - List nodes1, nodes2; - TreeMap> queryMap1, indexMap1; - List> varChanges = new ArrayList>(); - List tupleList = new ArrayList(); - - - - // if tuples are equal, no need to do anything - if (tuple1.equals(tuple2)) { - tupleList.add((TupleExpr) tuple1.clone()); - return tupleList; - } - - - NormalizeQueryVisitor tupNVis = new NormalizeQueryVisitor(false); - NormalizeQueryVisitor indexNVis = new NormalizeQueryVisitor(true); - tuple1.visit(tupNVis); - tuple2.visit(indexNVis); - - - TupleExpr tuple; - queryMap1 = tupNVis.getMap(); - indexMap1 = indexNVis.getMap(); - - // TreeMaps that used for comparators - TreeMap[] trees = (TreeMap[]) new TreeMap[4]; - for (int i = 0; i < 4; i++) { - trees[i] = new TreeMap(); - } - - trees[0] = tupNVis.getKeyMap(); // query tuple variable count - trees[2] = indexNVis.getKeyMap(); // index tuple variable count - - - 
// if query does not contain as many constant Vars as index, - // normalization not possible. -// if (!(trees[0].keySet().size() >= trees[2].keySet().size())) { -// System.out.println("In here:1"); -// return tupleList; -// } - - // sort keys according to size of associated StatementPattern list - // this optimization ensures that initial list of HashMaps (possible - // variable substitutions) - // is as small as possible - // Maybe add additional criteria to comparator taking into account size - // of query bin lists - Set keys = indexMap1.keySet(); - List keyList = new ArrayList(keys); - Collections.sort(keyList, new ConstantKeyComp(indexMap1, queryMap1)); - - // iterate through constant values associated with smaller tuple, - // check that larger tuple constants these constants, and use lists - // of associated statement patterns to begin to construct variable - // substitutions - // that are consistent - - for (String s : keyList) { - if (queryMap1.containsKey(s)) { - nodes1 = queryMap1.get(s); - nodes2 = indexMap1.get(s); - - - if (!(nodes1.size() >= nodes2.size())) { -// System.out.println("In here: 2"); -// System.out.println("Node lists are " + nodes1 + " and " + -// nodes2); - return tupleList; - } - - trees[1] = getListVarCnt(nodes1, tupNVis.getVariableMap()); // query - // list - // variable - // count - trees[3] = getListVarCnt(nodes2, indexNVis.getVariableMap()); // index - // list - // variable - // count - Collections.sort(nodes1, new CountComp(trees[1], trees[0])); - Collections.sort(nodes2, new CountComp(trees[3], trees[2])); - - varChanges = statementCompare(nodes1, nodes2, varChanges, trees); - - if (varChanges.size() == 0) { - return tupleList; - } - } - - else { - return tupleList; - } - - } - - List filters2 = indexNVis.getFilters(); - // determine if index contains filters whose variables need to be relabeled - if (filters2.size() != 0) { - List filters1 = tupNVis.getFilters(); - // only attempt to normalize variables if query contains more filters than index - if (filters1.size() >= filters2.size()) { - Collections.sort(filters1, new FilterComp()); - Collections.sort(filters2, new FilterComp()); - - varChanges = statementCompare(filters1, filters2, varChanges, trees); - - } - } - - List> varChangeSet = new ArrayList>(); - - for (HashMap s : varChanges) { - if (!varChangeSet.contains(s)) { - varChangeSet.add(s); - } - - } - - - ValueMapVisitor valMapVis = new ValueMapVisitor(); - tuple1.visit(valMapVis); - Map valMap = valMapVis.getValueMap(); - - - for (HashMap s : varChangeSet) { - //System.out.println(s); - tuple = tuple2.clone(); - replaceTupleVariables(s, tuple, valMap); - tupleList.add(tuple); - } - - return tupleList; - - } - - /** - * Produces a list of all possible substitutions stored in HashMaps that are - * consistent with the two lists of statement patterns - * - * @param qArray - * list of Statement nodes from query tuple - * @param iArray - * list of Statement nodes from index tuple - * @param hMaps - * HashMap containing variable substitutions - * @param trees - * TreeMaps used for statement pattern node ordering - * @return - */ - private static List> statementCompare(List qArray, - List iArray, List> hMaps, TreeMap[] trees) { - - if (hMaps.size() == 0) { - HashMap, Boolean> mapConsistent = new HashMap, Boolean>(); - HashMap hMap = new HashMap(); - mapConsistent.put(hMap, false); - evaluateMap(qArray, iArray, hMap, hMaps, mapConsistent, trees); - - return hMaps; - } - - else { - - ArrayList> tempMaps = Lists.newArrayList(hMaps); - HashMap, Boolean> 
mapConsistent = new HashMap, Boolean>(); - for (HashMap s : hMaps) { - mapConsistent.put(s, false); - } - for (HashMap s : hMaps) { - evaluateMap(qArray, iArray, s, tempMaps, mapConsistent, trees); - } - - return tempMaps; - - } - } - - - /** - * Adds or removes HashMap substitution schemes to the list of substitutions - * schemes depending on whether or not they are consistent with the two - * lists of statement patterns - * - * @param qArray - * List of StatementPatterns associated with query array - * @param iArray - * List of StatementPatterns associated with index array - * @param hMap - * HashMap of substitutions to be analyzed for consistent and - * added or removed - * @param hMaps - * List of HashMaps containing substitution schemes - * @param trees - * Array of TreeMaps used for comparison of StatementPattern - * nodes - */ - private static void evaluateMap(List qArray, List iArray, - HashMap hMap, List> hMaps, - HashMap, Boolean> mapConsistent, TreeMap[] trees) throws IllegalArgumentException { - - // if all nodes in indexArray have been exhausted, add map of substitutions to - // list of possible substitution schemes. - if (iArray.size() == 0) { - if (!hMaps.contains(hMap)) { - hMaps.add(hMap); - } - mapConsistent.put(hMap, true); - return; - } - - // for a given constant key, iterate through possible combinations of statement pattern nodes contained in associated query list and - // index list to generate all possible substitution schemes. - for (int i = 0; i < iArray.size(); i++) { - for (int j = 0; j < qArray.size(); j++) { - //System.out.println("Query list is " + qArray+ " and index list is " + iArray); - - QueryModelNode node1 = qArray.get(j); - QueryModelNode node2 = iArray.get(i); - // if lists contain statement patterns, check to see if two given statement patterns have same structure - // independent of variables names (same constants in same place, non constant Vars in same place) - if ((node1 instanceof StatementPattern) && (node2 instanceof StatementPattern)) { - if (genConstantCompare((StatementPattern) node1, (StatementPattern) node2)) { - - List variables1 = ((StatementPattern)node1).getVarList(); - List variables2 = ((StatementPattern)node2).getVarList(); - - List> vars = genGetCommonVars(variables1, variables2); - List vars1 = vars.get(0); - List vars2 = vars.get(1); - - if (listConsistent(vars1, vars2, hMap)) { - - HashMap hashMap = Maps.newHashMap(hMap); - putVars(vars1, vars2, hashMap); - - List queryArray = Lists.newArrayList(qArray); - List indexArray = Lists.newArrayList(iArray); - - indexArray.remove(i); - queryArray.remove(j); - - evaluateMap(queryArray, indexArray, hashMap, hMaps, mapConsistent, trees); - } - } - } // if lists contain filters, see if filters have same structure independent of variables names - //(check that conditions are same independent of variable names). 
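Editor's note: the recursion above is easiest to see on a toy input. The sketch below re-states the core of evaluateMap(): pick a distinct query pattern for each index pattern that shares the same constant, extend the substitution map only when the pair is consistent (fresh key and fresh value, or exactly the pair already recorded, which is the same rule checkVariables() enforces later in this file), and backtrack. Pattern, match and the literal values are illustrative; requires Java 16+ for the record. The filter branch below applies the same idea to Filter conditions.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SubstitutionSketch {
    /** Toy pattern: one constant plus one variable stands in for a StatementPattern. */
    record Pattern(String constant, String var) {}

    static void match(List<Pattern> query, boolean[] used, List<Pattern> index, int i,
                      Map<String, String> subst, List<Map<String, String>> out) {
        if (i == index.size()) {              // every index pattern matched: keep this scheme
            out.add(new HashMap<>(subst));
            return;
        }
        Pattern ip = index.get(i);
        for (int q = 0; q < query.size(); q++) {
            Pattern qp = query.get(q);
            boolean consistent = (!subst.containsKey(ip.var()) && !subst.containsValue(qp.var()))
                    || qp.var().equals(subst.get(ip.var()));
            if (!used[q] && qp.constant().equals(ip.constant()) && consistent) {
                boolean fresh = !subst.containsKey(ip.var());
                subst.put(ip.var(), qp.var());
                used[q] = true;
                match(query, used, index, i + 1, subst, out);
                used[q] = false;              // backtrack
                if (fresh) subst.remove(ip.var());
            }
        }
    }

    public static void main(String[] args) {
        List<Pattern> query = List.of(new Pattern("worksAt", "a"), new Pattern("worksAt", "b"));
        List<Pattern> index = List.of(new Pattern("worksAt", "x"));
        List<Map<String, String>> out = new ArrayList<>();
        match(query, new boolean[query.size()], index, 0, new HashMap<>(), out);
        System.out.println(out);              // [{x=a}, {x=b}]: both relabelings are consistent
    }
}
```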
- else if ((node1 instanceof Filter) && (node2 instanceof Filter)) { - try { - if (filterCompare((Filter) node1, (Filter) node2)) { - - List variables1 = FilterVarValueCollector.process(((Filter) node1).getCondition()); - List variables2 = FilterVarValueCollector.process(((Filter) node2).getCondition()); - - List> vars = filterCommonVars(variables1, variables2); - List vars1 = vars.get(0); - List vars2 = vars.get(1); - - if (listConsistent(vars1, vars2, hMap)) { - - HashMap hashMap = Maps.newHashMap(hMap); - putVars(vars1, vars2, hashMap); - - List queryArray = Lists.newArrayList(qArray); - List indexArray = Lists.newArrayList(iArray); - - indexArray.remove(i); - queryArray.remove(j); - - evaluateMap(queryArray, indexArray, hashMap, hMaps, mapConsistent, trees); - } - - } - } catch (Exception e) { - System.out.println("Invalid Filter! " + e); - } - - } else { - throw new IllegalArgumentException("Invalid query tree."); - } - - } - } - if (mapConsistent.containsKey(hMap)) - if (mapConsistent.get(hMap) == false) { - hMaps.remove(hMap); - } - return; - - } - - - - - private static List> genGetCommonVars(List vars1, List vars2) { - - - List> varList = Lists.newArrayList(); - List varList1 = Lists.newArrayList(); - List varList2 = Lists.newArrayList(); - - - - for (int i = 0; i < vars1.size(); i++) { - - if (!vars1.get(i).isConstant() && !vars2.get(i).isConstant()) { - - varList1.add(vars1.get(i).getName()); - varList2.add(vars2.get(i).getName()); - - } else if(vars1.get(i).isConstant() && !vars2.get(i).isConstant()) { - varList1.add(vars1.get(i).getName()); - varList2.add(vars2.get(i).getName()); - } - - } - - varList.add(varList1); - varList.add(varList2); - - return varList; - } - - - private static List> filterCommonVars(List vars1, List vars2) { - - - List> varList = Lists.newArrayList(); - List varList1 = Lists.newArrayList(); - List varList2 = Lists.newArrayList(); - - - - for (int i = 0; i < vars1.size(); i++) { - - if ((vars1.get(i) instanceof ValueConstant) && (vars2.get(i) instanceof Var)) { - - ValueConstant vc = (ValueConstant) vars1.get(i); - String s = vc.getValue().toString(); - if(vc.getValue() instanceof Literal) { - s = s.substring(1, s.length() - 1); - } - s = "-const-" + s; - varList1.add(s); - varList2.add(((Var)vars2.get(i)).getName()); - } else if(!(vars1.get(i) instanceof ValueConstant)){ - if (!((Var) vars1.get(i)).isConstant() && (vars2.get(i) instanceof Var) - && !((Var) vars2.get(i)).isConstant()) { - varList1.add(((Var) vars1.get(i)).getName()); - varList2.add(((Var) vars2.get(i)).getName()); - } else if (((Var) vars1.get(i)).isConstant() && (vars2.get(i) instanceof Var) - && !((Var) vars2.get(i)).isConstant()) { - varList1.add(((Var) vars1.get(i)).getName()); - varList2.add(((Var) vars2.get(i)).getName()); - } - } - - } - - varList.add(varList1); - varList.add(varList2); - - return varList; - } - - - - private static boolean genConstantCompare(StatementPattern queryNode, StatementPattern indexNode) { - - - - ArrayList vars1 = (ArrayList) queryNode.getVarList(); - ArrayList vars2 = (ArrayList) indexNode.getVarList(); - - - for (int i = 0; i < vars1.size(); i++) { - - if (vars1.get(i).isConstant() && vars2.get(i).isConstant()) { - - if (!vars1.get(i).equals(vars2.get(i))) { - return false; - - } - - } else if(!vars1.get(i).isConstant() && vars2.get(i).isConstant() ) { - return false; - } - - } - - return true; - - } - - - - - /** - * Method checks that substituting val for key is consistent with - * substitutions in hMap - * - * @param val - * substituting variable 
- * @param key - * variable to be substituted for - * @param hMap - * HashMap containing the substitutions to be made - * @return true if the proposed substitution is consistent with hMap, and - * false otherwise - */ - private static boolean checkVariables(String val, String key, HashMap hMap) { - - if (!hMap.containsKey(key) && !hMap.containsValue(val)) { - - return true; - } else if (!hMap.containsKey(key) && hMap.containsValue(val) || hMap.containsKey(key) - && !hMap.containsValue(val)) { - - return false; - } else { - - if (hMap.get(key).equals(val)) { - return true; - } else - return false; - - } - - } - - - - - - - // given two lists of variables and a HashMap, checks to see if substituting variable names in varList1 - // for variable names in varList2 is consistent with map. - private static boolean listConsistent(List varList1, List varList2, HashMap hMap) { - - for (int k = 0; k < varList1.size(); k++) { - - String s1 = varList1.get(k); - String s2 = varList2.get(k); - if (!checkVariables(s1, s2, hMap)) { - return false; - } - } - return true; - - } - - - // given two lists of variables and a HashMap, substitutes variable names in varList1 - // for variable names in varList2 by updating map. - private static void putVars(List varList1, List varList2, HashMap hashMap) { - - for (int k = 0; k < varList1.size(); k++) { - String s1 = varList1.get(k); - String s2 = varList2.get(k); - if (!hashMap.containsKey(s2)) { - - hashMap.put(s2, s1); - } - } - - } - - - /** - * @param filter1 - * @param filter2 - * @return true if filter2 is equal to filter1 once variables in filter2 are replaced with variables and constants - * occurring in same position in filter1 (allows filter1 to contain constants where filter2 contains variables) - * @throws Exception - */ - private static boolean filterCompare(Filter filter1, Filter filter2) throws Exception { - - NodeCollector nc1 = new NodeCollector(); - NodeCollector nc2 = new NodeCollector(); - - filter1.getCondition().visit(nc1); - filter2.getCondition().visit(nc2); - - List nodeList1 = nc1.getNodes(); - List nodeList2 = nc2.getNodes(); - - if (nodeList1.size() != nodeList2.size()) { - return false; - } - - for (int i = 0; i < nodeList1.size(); i++) { - if ((nodeList1.get(i) instanceof ValueConstant) && (nodeList2.get(i) instanceof Var)) { - continue; - } else { - if (nodeList1.get(i).getClass() != nodeList2.get(i).getClass()) { - return false; - } - } - } - - return true; - - } - - /** - * Given a HashMap containing variable substitutions and a tuple, this - * method uses a visitor to iterate through the tuple and make the necessary - * substitutions - * - * @param varChanges - * @param tuple - * @throws Exception - */ - private static void replaceTupleVariables(HashMap varChanges, TupleExpr tuple, Map valMap) throws Exception { - - TupleVarRenamer visitor = new TupleVarRenamer(varChanges, valMap); - tuple.visit(visitor); - } - - /** - * Given a list of StatementPattern nodes and a TreeMap containing the - * variables in the tuple, this method counts the number of occurrences of - * each variable in the given list - * - * @param list - * List of StatementPattern nodes - * @param cnt - * TreeMap whose keys are tuple variables and whose value is 0 - * @return TreeMap whose keys are tuple variables and whose value is the - * number of times variable appears in list - */ - private static TreeMap getListVarCnt(List list, TreeMap cnt) { - - int count = 0; - - for (QueryModelNode qNode : list) { - List vars = VarCollector.process(qNode); - for (String s : 
vars) { - count = cnt.get(s); - count++; - cnt.put(s, count); - } - - } - - return cnt; - - } - - /** - * Given a StatementPattern and two TreeMaps containing the variable counts - * associated with an associated list and tuple, this method assigns a - * number to the StatementPattern node which is determined by the number of - * times its variables (non-constant Vars) appear in the list and throughout - * the tuple - * - * @param sp - * StatementPattern node - * @param listCount - * TreeMap with variable count info associated with list - * @param tupCount - * TreeMap with variable count info associated with tuple - * @return count info associated with StatementPattern node - */ - private static int getSpCount(QueryModelNode sp, TreeMap listCount, - TreeMap tupCount) { - - int spCount = 0; - - List vars = VarCollector.process(sp); - for (String var : vars) { - spCount = spCount + listCount.get(var) + tupCount.get(var); - } - return spCount; - - } - - /** - * @return NormalizedQueryVisitor - */ - public static NormalizeQueryVisitor getVisitor(boolean isIndex) { - return new NormalizeQueryVisitor(isIndex); - - } - - - // ********************Definition of Comparators**************** - // ************************************************************* - public static class CountComp implements Comparator { - - private TreeMap lCount, tupleCount; - - public CountComp(TreeMap lCount, TreeMap tupleCount) { - - this.lCount = lCount; - this.tupleCount = tupleCount; - } - - // compares StatementPattern nodes based on frequency at which their - // variables appear in other StatementPattern nodes in associated - // tuple and list - - public int compare(QueryModelNode sp1, QueryModelNode sp2) { - - return -(getSpCount(sp1, lCount, tupleCount) - getSpCount(sp2, lCount, tupleCount)); - } - - } - - // comparator to sort constant key list according to size of associated - // StatementPattern array - public static class ConstantKeyComp implements Comparator { - - private TreeMap> indexMap, queryMap; - - public ConstantKeyComp(TreeMap> indexMap, - TreeMap> queryMap) { - - this.indexMap = indexMap; - this.queryMap = queryMap; - - } - - // Compare method to sort keys of HashMap - // for index based on whether key also appears in query Map--if key does - // not appear - // in query map, key is given value 0 so it is moved to front when key - // list is sorted. - // If key appears in query map, key is assigned value that is the sum of - // the size of the associated - // lists in index map and query map. 
- - public int compare(String key1, String key2) { - - int len1 = 0; - int len2 = 0; - - if (queryMap.containsKey(key1) && indexMap.containsKey(key1)) - len1 = indexMap.get(key1).size() + queryMap.get(key1).size(); - if (queryMap.containsKey(key2) && indexMap.containsKey(key2)) - len2 = indexMap.get(key2).size() + queryMap.get(key2).size(); - - return (len1 - len2); - - } - - } - - public static class FilterComp implements Comparator { - - public int compare(QueryModelNode q1, QueryModelNode q2) { - - int size1 = VarCollector.process(q1).size(); - int size2 = VarCollector.process(q2).size(); - - return size1 - size2; - - } - - } - - // ******************** Definition of Visitors***************** - // ************************************************************ - - - - public static class ValueMapVisitor extends QueryModelVisitorBase { - - - private Map valMap = Maps.newHashMap(); - - - - public Map getValueMap() { - return valMap; - } - - public void meet(Var var) { - if (var.isConstant()) { - valMap.put(var.getName(),var.getValue()); - } - - - } - - public void meet(ValueConstant val) { - - String s = val.getValue().toString(); - - if (val.getValue() instanceof Literal) { - s = s.substring(1, s.length() - 1); - } - - s = "-const-" + s; - valMap.put(s, val.getValue()); - } - - } - - - - - - - - public static class NodeCollector extends QueryModelVisitorBase { - - - private List nodes = Lists.newArrayList(); - - public List getNodes() { - return nodes; - } - - @Override - public void meetNode(QueryModelNode node) throws Exception { - nodes.add(node); - super.meetNode(node); - } - - } - - public static class SpVarReNamer extends QueryModelVisitorBase { - - private final HashMap hMap; - private Map valMap; - private final ValueFactoryImpl vf = new ValueFactoryImpl(); - - public SpVarReNamer(HashMap hMap, Map valMap) { - this.valMap = valMap; - this.hMap = hMap; - } - - public void meet(Var var) { - if (!var.isConstant() && hMap.containsKey(var.getName())) { - String val = hMap.get(var.getName()); - if (val.startsWith("-const-")) { - var.setName(val); - var.setValue(valMap.get(val)); - var.setAnonymous(true); //TODO this might be a hack -- when are Vars not anonymous? 
- } else { - var.setName(val); - } - } - } - - } - - - - - public static class FilterVarReNamer extends QueryModelVisitorBase { - - private final HashMap hMap; - private Map valMap; - private final ValueFactoryImpl vf = new ValueFactoryImpl(); - - public FilterVarReNamer(HashMap hMap, Map valMap) { - this.valMap = valMap; - this.hMap = hMap; - } - - @Override - public void meet(Var var) { - - if (!(var.getParentNode() instanceof NAryValueOperator)) { - if (!var.isConstant() && hMap.containsKey(var.getName())) { - String val = hMap.get(var.getName()); - if (val.startsWith("-const-")) { - var.replaceWith(new ValueConstant(valMap.get(val))); - } else { - var.setName(val); - } - } - } - } - - - - @Override - public void meetNAryValueOperator(NAryValueOperator node) { - - List oldValues = node.getArguments(); - List newValues = Lists.newArrayList(); - - for (ValueExpr v : oldValues) { - if (v instanceof Var) { - Var var = (Var) v; - if (!(var.isConstant() && hMap.containsKey(var.getName()))) { - String val = hMap.get(var.getName()); - if (val.startsWith("-const-")) { - newValues.add(new ValueConstant(valMap.get(val))); - } else { - var.setName(val); - newValues.add(var); - } - } - } else { - newValues.add(v); - } - } - - node.setArguments(newValues); - - } - - - } - - - - - public static class TupleVarRenamer extends QueryModelVisitorBase { - - private final HashMap varChanges; - private Map valMap; - - public TupleVarRenamer(HashMap varChanges, Map valMap) { - this.varChanges = varChanges; - this.valMap = valMap; - } - - @Override - public void meet(ProjectionElemList node) { - List proj = node.getElements(); - for (ProjectionElem s : proj) { - if (varChanges.containsKey(s.getSourceName())) { - String name = s.getSourceName(); - s.setSourceName(varChanges.get(name)); - s.setTargetName(varChanges.get(name)); - - } - } - - } - - - @Override - public void meet(StatementPattern node) { - SpVarReNamer spv = new SpVarReNamer(varChanges, valMap); - node.visit(spv); - } - - - @Override - public void meet(Filter node) { - FilterVarReNamer fvr = new FilterVarReNamer(varChanges, valMap); - node.getCondition().visit(fvr); - node.getArg().visit(this); - - } - - - - } - - public static class VarCollector extends QueryModelVisitorBase { - - public static List process(QueryModelNode node) { - VarCollector collector = new VarCollector(); - node.visit(collector); - return collector.getVarNames(); - } - - public static List processVar(QueryModelNode node) { - VarCollector collector = new VarCollector(); - node.visit(collector); - return collector.getVars(); - } - - private List varNames = new ArrayList(); - private List vars = Lists.newArrayList(); - - public List getVarNames() { - return varNames; - } - - public List getVars() { - return vars; - } - - @Override - public void meet(Var var) { - if (!var.hasValue()) { - varNames.add(var.getName()); - } - vars.add(var); - } - } - - public static class FilterVarValueCollector extends QueryModelVisitorBase { - - public static List process(QueryModelNode node) { - FilterVarValueCollector collector = new FilterVarValueCollector(); - node.visit(collector); - return collector.getVars(); - } - - - - private List vars = Lists.newArrayList(); - - - public List getVars() { - return vars; - } - - @Override - public void meet(Var node) { - vars.add(node); - } - - @Override - public void meet(ValueConstant node) { - vars.add(node); - } - - - - } - - - - - public static class NormalizeQueryVisitor extends QueryModelVisitorBase { - - private TreeMap> map = new TreeMap>(); - 
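Editor's note on FilterVarReNamer.meetNAryValueOperator() above: as written, the guard `!(var.isConstant() && hMap.containsKey(var.getName()))` also admits free Vars that have no entry in hMap, so the following `hMap.get(...)` returns null and `val.startsWith("-const-")` throws a NullPointerException; arguments that fail the guard are also silently dropped from newValues. A corrected sketch of that loop, under the assumption that the intent matches meet(Var) above; renameArguments is a hypothetical helper name.

```java
import java.util.List;
import java.util.Map;

import org.openrdf.query.algebra.ValueConstant;
import org.openrdf.query.algebra.ValueExpr;
import org.openrdf.query.algebra.Var;

import com.google.common.collect.Lists;

public class FilterArgRenamer {
    /** Rewrites only free Vars with a recorded replacement; everything else passes through. */
    static List<ValueExpr> renameArguments(List<ValueExpr> oldValues,
            Map<String, String> hMap, Map<String, org.openrdf.model.Value> valMap) {
        List<ValueExpr> newValues = Lists.newArrayList();
        for (ValueExpr v : oldValues) {
            if (v instanceof Var && !((Var) v).isConstant()
                    && hMap.containsKey(((Var) v).getName())) {
                Var var = (Var) v;
                String val = hMap.get(var.getName());
                if (val.startsWith("-const-")) {
                    // the replacement is a query constant: inline its Value
                    newValues.add(new ValueConstant(valMap.get(val)));
                } else {
                    var.setName(val);          // plain variable-to-variable rename
                    newValues.add(var);
                }
            } else {
                newValues.add(v);              // constants and unmapped Vars unchanged
            }
        }
        return newValues;
    }
}
```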
private TreeMap varMap = new TreeMap(); - private TreeMap emptyVarMap = new TreeMap(); - private List statementList = new ArrayList(); - private List filters = new ArrayList(); - private boolean isIndex; - - - - public NormalizeQueryVisitor(boolean isIndex) { - this.isIndex = isIndex; - } - - - - private TreeMap> getMap() { - - return map; - - } - - - private TreeMap getKeyMap() { - - return varMap; - } - - private TreeMap getVariableMap() { - return emptyVarMap; - } - - public List getStatementPatterns() { - return statementList; - } - - - private List getFilters() { - - return filters; - } - - @Override - public void meet(StatementPattern node) throws Exception { - - statementList.add(node); - - String s = ""; - String t = ""; - - Var node1 = node.getSubjectVar(); - Var node2 = node.getObjectVar(); - Var node3 = node.getPredicateVar(); - Var node4 = node.getContextVar(); - - String s1 = ""; - String s2 = ""; - String s3 = ""; - String s4 = ""; - - - if (node1.isConstant()) - s1 = node1.getName().substring(7); - - if (node2.isConstant()) - s2 = node2.getName().substring(7); - - if (node3.isConstant()) - s3 = node3.getName().substring(7); - - if (node4 != null) { - if (node4.isConstant()) - s4 = node4.getName().substring(7); - } - - if ((s1+s2+s3).length() == 0) { - s = "Nonconstant nodes have no variables."; - } - - List nodes; - - - if (s.length() > 0) { - - if (map.containsKey(s)) { - nodes = map.get(s); - nodes.add(node); - } else { - nodes = new ArrayList(); - nodes.add(node); - } - - map.put(s, nodes); - - } else { - - if (isIndex) { - - t = s1 + s2 + s3 + s4; - - if (map.containsKey(t)) { - nodes = map.get(t); - nodes.add(node); - } else { - nodes = new ArrayList(); - nodes.add(node); - } - - map.put(t, nodes); - - } else { - - String[] comps = new String[4]; - comps[0] = s1; - comps[1] = s2; - comps[2] = s3; - comps[3] = s4; - - for (int i = 0; i < 3; i++) { - if (comps[i].length() != 0) { - if (map.containsKey(comps[i] + comps[3])) { - nodes = map.get(comps[i] + comps[3]); - nodes.add(node); - } else { - nodes = new ArrayList(); - nodes.add(node); - } - - map.put(comps[i] + comps[3], nodes); - - for (int j = i + 1; j < 3; j++) { - if (comps[j].length() != 0) { - if (map.containsKey(comps[i] + comps[j] + comps[3])) { - nodes = map.get(comps[i] + comps[j] + comps[3]); - nodes.add(node); - } else { - nodes = new ArrayList(); - nodes.add(node); - } - map.put(comps[i] + comps[j] + comps[3], nodes); - } - - } - } - } - - if (s1.length() != 0 && s2.length() != 0 && s3.length() != 0) { - if (map.containsKey(s1 + s2 + s3 + s4)) { - nodes = map.get(s1 + s2 + s3 + s4); - nodes.add(node); - } else { - nodes = new ArrayList(); - nodes.add(node); - } - map.put(s1 + s2 + s3 + s4, nodes); - } - } - } - - super.meet(node); - - } - - @Override - public void meet(Var node) throws Exception { - - int count = 1; - - if (!node.isConstant()) { - if (varMap.containsKey(node.getName())) { - count = varMap.get(node.getName()); - count++; - varMap.put(node.getName(), count); - } else - varMap.put(node.getName(), 1); - - if (!emptyVarMap.containsKey(node.getName())) - emptyVarMap.put(node.getName(), 0); - - } - super.meet(node); - } - - public void meet(Filter filter) throws Exception { - filters.add(filter); - super.meet(filter); - } - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java deleted file mode 100644 index dda452d1d..000000000 --- 
a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java +++ /dev/null @@ -1,626 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NoSuchElementException; -import java.util.Set; - -import mvm.rya.accumulo.precompQuery.AccumuloPrecompQueryIndexer; -import mvm.rya.rdftriplestore.evaluation.ExternalBatchingIterator; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.query.Binding; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.impl.EmptyBindingSet; -import org.openrdf.query.parser.ParsedTupleQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.SailException; - -import com.beust.jcommander.internal.Sets; -import com.google.common.base.Joiner; -import com.google.common.collect.BiMap; -import com.google.common.collect.HashBiMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -public class AccumuloIndexSet extends ExternalTupleSet implements ExternalBatchingIterator { - - private static final int WRITER_MAX_WRITE_THREADS = 30; - private static 
final long WRITER_MAX_LATENCY = Long.MAX_VALUE;
- private static final long WRITER_MAX_MEMORY = 500L * 1024L * 1024L;
- private Map<String, AccValueFactory> bindings;
- private List<String> bindingslist;
- private final Connector accCon;
- private final String tablename;
- private long tableSize = 0;
- private List<String> varOrder = null;
-
-
- public static interface AccValueFactory {
- public org.openrdf.model.Value create(String str);
-
- public String create(org.openrdf.model.Value val);
- }
-
- public static class AccUrlFactory implements AccValueFactory {
- @Override
- public org.openrdf.model.Value create(final String str) {
- return new URIImpl(str);
- }
-
- @Override
- public String create(org.openrdf.model.Value val) {
- return val.stringValue();
- }
- }
-
- public static class AccValueFactoryImpl implements AccValueFactory {
- @Override
- public org.openrdf.model.Value create(final String str) {
- String[] split = str.split("\u0001");
- if (split.length > 1 && split[1].equals("1")) {
- return new URIImpl(split[0]);
- }
- if (split[0].contains(":")) {
- return new URIImpl(split[0]);
- }
- return new LiteralImpl(split[0]);
- }
-
- @Override
- public String create(org.openrdf.model.Value val) {
- if (val instanceof URI) {
- return val.stringValue() + "\u0001" + 1;
- }
- if (val instanceof Literal) {
- Literal v = (Literal) val;
- return v.getLabel() + "\u0001" + 2;
- }
- return null;
- }
- }
-
-
- //TODO set supportedVarOrderMap
- public AccumuloIndexSet(String sparql, SailRepositoryConnection conn, Connector accCon, String tablename) throws MalformedQueryException, SailException,
- QueryEvaluationException, MutationsRejectedException, TableNotFoundException {
- super(null);
- this.tablename = tablename;
- this.accCon = accCon;
- SPARQLParser sp = new SPARQLParser();
- ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(sparql, null);
-
- setProjectionExpr((Projection) pq.getTupleExpr());
- CloseableIteration<BindingSet, QueryEvaluationException> iter = (CloseableIteration<BindingSet, QueryEvaluationException>) conn.getSailConnection()
- .evaluate(getTupleExpr(), null, new EmptyBindingSet(), false);
-
- BatchWriter w = accCon.createBatchWriter(tablename, WRITER_MAX_MEMORY, WRITER_MAX_LATENCY, WRITER_MAX_WRITE_THREADS);
- this.bindingslist = Lists.newArrayList(pq.getTupleExpr().getAssuredBindingNames());
-
- this.bindings = Maps.newHashMap();
-
- pq.getTupleExpr().visit(new QueryModelVisitorBase<RuntimeException>() {
- @Override
- public void meet(Var node) {
- QueryModelNode parent = node.getParentNode();
- if (parent instanceof StatementPattern) {
- StatementPattern statement = (StatementPattern) parent;
- if (node.equals(statement.getSubjectVar())) {
- bindings.put(node.getName(), new AccUrlFactory());
- }
- if (node.equals(statement.getPredicateVar())) {
- bindings.put(node.getName(), new AccUrlFactory());
- }
- if (node.equals(statement.getObjectVar())) {
- bindings.put(node.getName(), new AccValueFactoryImpl());
- }
- if (node.equals(statement.getContextVar())) {
- // TODO is this correct?
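Editor's note: the two factories registered in this visitor fix the wire format for the index table: subjects, predicates and contexts are stored as bare URI strings, while object values carry a \u0001-separated type tag (1 for URIs, 2 for Literals). A small round-trip sketch, assuming the Sesame model classes already imported in this file; the example value is illustrative.

```java
public class AccValueRoundTrip {
    public static void main(String[] args) {
        AccumuloIndexSet.AccValueFactoryImpl f = new AccumuloIndexSet.AccValueFactoryImpl();

        String uri = f.create(new org.openrdf.model.impl.URIImpl("urn:example:alice"));
        String lit = f.create(new org.openrdf.model.impl.LiteralImpl("42"));
        System.out.println(uri);           // urn:example:alice<U+0001>1
        System.out.println(lit);           // 42<U+0001>2

        System.out.println(f.create(uri)); // parsed back as a URI
        System.out.println(f.create(lit)); // parsed back as a plain Literal
    }
}
```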
- bindings.put(node.getName(), new AccUrlFactory()); - } - } else if(parent instanceof ValueExpr) { - bindings.put(node.getName(), new AccValueFactoryImpl()); - } - }; - }); - - - - - - varOrder = new ArrayList(bindingslist.size()); - - while (iter.hasNext()) { - - BindingSet bs = iter.next(); - List shiftBindingList = null; - for (int j = 0; j < bindingslist.size(); j++) { - StringBuffer sb = new StringBuffer(); - shiftBindingList = listShift(bindingslist, j); //TODO calling this each time not efficient - String order = ""; - for (String b : shiftBindingList) { - String val = bindings.get(b).create(bs.getValue(b)); - sb.append(val).append("\u0000"); - if (order.length() == 0) { - order = b; - } else { - order = order + "\u0000" + b; - } - } - - if (varOrder.size() < bindingslist.size()) { - varOrder.add(order); - } - - //System.out.println("String buffer is " + sb); - Mutation m = new Mutation(sb.deleteCharAt(sb.length() - 1).toString()); - m.put(new Text(varOrder.get(j)), new Text(""), new org.apache.accumulo.core.data.Value(new byte[]{})); - w.addMutation(m); - } - tableSize += 1; - } - - setLocalityGroups(tablename, accCon, varOrder); - this.setSupportedVariableOrderMap(createSupportedVarOrderMap(varOrder)); - - - String orders = ""; - - for(String s : varOrder) { - s = s.replace("\u0000", ";"); - if(orders.length() == 0) { - orders = s; - } else { - orders = orders + "\u0000" + s; - } - } - - - Mutation m = new Mutation("~SPARQL"); - Value v = new Value(sparql.getBytes()); - m.put(new Text("" + tableSize), new Text(orders), v); - w.addMutation(m); - - w.close(); - iter.close(); - } - - - - - @Override - public Map> getSupportedVariableOrders() { - - return this.getSupportedVariableOrderMap(); - - } - - - @Override - public boolean supportsBindingSet(Set bindingNames) { - - Map> varOrderMap = this.getSupportedVariableOrders(); - Collection> values = varOrderMap.values(); - Set bNames = Sets.newHashSet(); - - for (String s : this.getTupleExpr().getAssuredBindingNames()) { - if (bindingNames.contains(s)) { - bNames.add(s); - } - } - - return values.contains(bNames); - } - - - private String getVarOrder(Set variables) { - - Map> varOrderMap = this.getSupportedVariableOrders(); - - Set>> entries = varOrderMap.entrySet(); - - for (Map.Entry> e : entries) { - - if (e.getValue().equals(variables)) { - return e.getKey(); - } - - } - - return null; - - } - - private String prefixToOrder(String order) { - - Map invMap = HashBiMap.create(this.getTableVarMap()).inverse(); - String[] temp = order.split("\u0000"); - - for (int i = 0; i < temp.length; i++) { - temp[i] = this.getTableVarMap().get(temp[i]); - } - - order = Joiner.on("\u0000").join(temp); - - for (String s : varOrder) { - if (s.startsWith(order)) { - - temp = s.split("\u0000"); - - for (int i = 0; i < temp.length; i++) { - temp[i] = invMap.get(temp[i]); - } - return Joiner.on("\u0000").join(temp); - } - } - throw new NoSuchElementException("Order is not a prefix of any locality group value!"); - } - - private String orderToLocGroup(List order) { - String localityGroup = ""; - for (String s : order) { - if (localityGroup.length() == 0) { - localityGroup = this.getTableVarMap().get(s); - } else { - localityGroup = localityGroup + "\u0000" + this.getTableVarMap().get(s); - } - } - return localityGroup; - - } - - - private void setLocalityGroups(String tableName, Connector conn, List groups) { - - HashMap> localityGroups = new HashMap>(); - - - - for (int i = 0; i < groups.size(); i++) { - HashSet tempColumn = new HashSet(); - 
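Editor's note on the write loop in the constructor above: each result BindingSet is written once per rotation of the binding list, so every supported variable order is a contiguous prefix of some row key. A standalone sketch of the rotation performed by listShift() (defined further down) and of the row-key layout; variable names are illustrative.

```java
import java.util.ArrayList;
import java.util.List;

public class RowKeySketch {
    /** Rotate the list left by j, as listShift() does. */
    static List<String> shift(List<String> list, int j) {
        List<String> out = new ArrayList<>(list.subList(j, list.size()));
        out.addAll(list.subList(0, j));
        return out;
    }

    public static void main(String[] args) {
        List<String> bindings = List.of("a", "b", "c");
        for (int j = 0; j < bindings.size(); j++) {
            List<String> order = shift(bindings, j);
            // In the constructor, the row key is the bound values for this
            // order joined by \u0000, and the order itself (also \u0000-joined)
            // becomes the column family and locality group.
            System.out.println(String.join("\u0000", order));
        }
        // prints the rotations: a b c / b c a / c a b (with \u0000 separators)
    }
}
```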
tempColumn.add(new Text(groups.get(i))); - String groupName = groups.get(i).replace("\u0000",""); - localityGroups.put(groupName, tempColumn); - } - - - try { - conn.tableOperations().setLocalityGroups(tableName, localityGroups); - } catch (AccumuloException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - - } - - - - - - - - private List listShift(List list, int j) { - - if(j >= list.size()) { - throw new IllegalArgumentException(); - } - - List shiftList = Lists.newArrayList(); - for(int i=0; i getConstantConstraints() { - - Map tableMap = this.getTableVarMap(); - Set keys = tableMap.keySet(); - Set constants = Sets.newHashSet(); - - for (String s : keys) { - if (s.startsWith("-const-")) { - constants.add(s); - } - - } - - return constants; - - } - - - - - public AccumuloIndexSet(String sparql, Connector accCon, String tablename) throws MalformedQueryException, SailException, QueryEvaluationException, - MutationsRejectedException, TableNotFoundException { - super(null); - this.tablename = tablename; - this.accCon = accCon; - SPARQLParser sp = new SPARQLParser(); - ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(sparql, null); - - setProjectionExpr((Projection) pq.getTupleExpr()); - - this.bindingslist = Lists.newArrayList(pq.getTupleExpr().getAssuredBindingNames()); - - this.bindings = Maps.newHashMap(); - pq.getTupleExpr().visit(new QueryModelVisitorBase() { - @Override - public void meet(Var node) { - QueryModelNode parent = node.getParentNode(); - if (parent instanceof StatementPattern) { - StatementPattern statement = (StatementPattern) parent; - if (node.equals(statement.getSubjectVar())) { - bindings.put(node.getName(), new AccUrlFactory()); - } - if (node.equals(statement.getPredicateVar())) { - bindings.put(node.getName(), new AccUrlFactory()); - } - if (node.equals(statement.getObjectVar())) { - bindings.put(node.getName(), new AccValueFactoryImpl()); - } - if (node.equals(statement.getContextVar())) { - // TODO is this correct? 
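Editor's note: this second constructor recovers everything it needs from the single "~SPARQL" metadata row written at build time (column family = row count, column qualifier = the supported variable orders, with ";" separating variables inside an order and \u0000 between orders, value = the original SPARQL string). A self-contained read sketch against the Accumulo client API used in this file; conn and table are assumed to exist.

```java
import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.io.Text;

public class IndexMetadataReader {
    static void readMetadata(Connector conn, String table) throws Exception {
        Scanner s = conn.createScanner(table, new Authorizations());
        s.setRange(Range.exact(new Text("~SPARQL")));
        Iterator<Entry<Key, Value>> it = s.iterator();
        if (!it.hasNext()) {
            throw new IllegalStateException("Index table contains no metadata!");
        }
        Entry<Key, Value> e = it.next();
        long tableSize = Long.parseLong(e.getKey().getColumnFamily().toString());
        // one entry per variable order; ";" separates variables inside an order
        String[] orders = e.getKey().getColumnQualifier().toString().split("\u0000");
        String sparql = e.getValue().toString();
        System.out.println(tableSize + " rows, " + orders.length + " orders, query: " + sparql);
    }
}
```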
- bindings.put(node.getName(), new AccUrlFactory()); - } - } else if(parent instanceof ValueExpr) { - bindings.put(node.getName(), new AccValueFactoryImpl()); - } - }; - }); - - - - - Scanner s = accCon.createScanner(tablename, new Authorizations()); - s.setRange(Range.exact(new Text("~SPARQL"))); - Iterator> i = s.iterator(); - - String[] tempVarOrders = null; - - if (i.hasNext()) { - Entry entry = i.next(); - Text ts = entry.getKey().getColumnFamily(); - tempVarOrders = entry.getKey().getColumnQualifier().toString().split("\u0000"); - tableSize = Long.parseLong(ts.toString()); - - } else { - throw new IllegalStateException("Index table contains no metadata!"); - } - - - varOrder = Lists.newArrayList(); - - for(String t: tempVarOrders) { - t = t.replace(";","\u0000"); - varOrder.add(t); - } - - setLocalityGroups(tablename, accCon, varOrder); - this.setSupportedVariableOrderMap(createSupportedVarOrderMap(varOrder)); - - } - - - - - private Map> createSupportedVarOrderMap(List orders) { - - Map> supportedVars = Maps.newHashMap(); - - for (String t : orders) { - - String[] tempOrder = t.split("\u0000"); - Set varSet = Sets.newHashSet(); - String u = ""; - - for (String s : tempOrder) { - if(u.length() == 0) { - u = s; - } else{ - u = u+ "\u0000" + s; - } - varSet.add(s); - supportedVars.put(u, new HashSet(varSet)); - - } - - } - - return supportedVars; - } - - - - @Override - public void setProjectionExpr(Projection tupleExpr) { - super.setProjectionExpr(tupleExpr); - this.bindingslist = Lists.newArrayList(tupleExpr.getAssuredBindingNames()); - - this.bindings = Maps.newHashMap(); - tupleExpr.visit(new QueryModelVisitorBase() { - @Override - public void meet(Var node) { - QueryModelNode parent = node.getParentNode(); - if (parent instanceof StatementPattern) { - StatementPattern statement = (StatementPattern) parent; - if (node.equals(statement.getSubjectVar())) { - bindings.put(node.getName(), new AccUrlFactory()); - } - if (node.equals(statement.getPredicateVar())) { - bindings.put(node.getName(), new AccUrlFactory()); - } - if (node.equals(statement.getObjectVar())) { - bindings.put(node.getName(), new AccValueFactoryImpl()); - } - if (node.equals(statement.getContextVar())) { - // TODO is this correct? 
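Editor's note: createSupportedVarOrderMap(), used just above, maps every \u0000-joined prefix of every stored variable order to the set of variables in that prefix; supportsBindingSet() then only has to test whether the bound variables form one of those sets. The same construction in isolation, with an illustrative order:

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class SupportedOrdersDemo {
    static Map<String, Set<String>> build(List<String> orders) {
        Map<String, Set<String>> supported = new HashMap<>();
        for (String order : orders) {
            Set<String> vars = new HashSet<>();
            StringBuilder prefix = new StringBuilder();
            for (String v : order.split("\u0000")) {
                if (prefix.length() > 0) {
                    prefix.append('\u0000');
                }
                prefix.append(v);
                vars.add(v);
                // each prefix of the order supports exactly this variable set
                supported.put(prefix.toString(), new HashSet<>(vars));
            }
        }
        return supported;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> m = build(List.of("a\u0000b\u0000c"));
        // keys "a", "a\u0000b", "a\u0000b\u0000c" map to {a}, {a,b}, {a,b,c}
        System.out.println(m.size()); // 3
    }
}
```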
- bindings.put(node.getName(), new AccUrlFactory()); - } - } else if (parent instanceof ValueExpr) { //Add bindings associated with Filters - bindings.put(node.getName(), new AccValueFactoryImpl()); - } - }; - }); - - } - - @Override - public String getSignature() { - return "AccumuloIndexSet(" + tablename + ") : " + Joiner.on(", ").join(bindingslist); - } - - @Override - public CloseableIteration evaluate(BindingSet bindingset) throws QueryEvaluationException { - return this.evaluate(Collections.singleton(bindingset)); - } - - @Override - public double cardinality() { - return tableSize; - } - - @Override - public CloseableIteration evaluate(final Collection bindingset) throws QueryEvaluationException { - - String localityGroup = ""; - Set commonVars = Sets.newHashSet(); - - if (!bindingset.isEmpty()) { - - BindingSet bs = bindingset.iterator().next(); - for (String b : bindingslist) { - Binding v = bs.getBinding(b); - if (v != null) { - commonVars.add(b); - } - - } - } - - commonVars.addAll(getConstantConstraints()); - AccumuloPrecompQueryIndexer apq = null; - List fullVarOrder = null; - try { - - if (commonVars.size() > 0) { - String commonVarOrder = getVarOrder(commonVars); - if(commonVarOrder == null) { - throw new IllegalStateException("Index does not support binding set!"); - } - fullVarOrder = Lists.newArrayList(prefixToOrder(commonVarOrder).split("\u0000")); - localityGroup = orderToLocGroup(fullVarOrder); - fullVarOrder.add("" + commonVars.size()); - - } else { - fullVarOrder = bindingslist; - localityGroup = orderToLocGroup(fullVarOrder); - fullVarOrder.add("" + 0); - } - - - apq = new AccumuloPrecompQueryIndexer(accCon, tablename); - ValueMapVisitor vmv = new ValueMapVisitor(); - this.getTupleExpr().visit(vmv); - - return apq.queryPrecompJoin(fullVarOrder, localityGroup, this.bindings, vmv.getValMap(), bindingset); - - } catch(TableNotFoundException e) { - throw new QueryEvaluationException(e); - } finally { - IOUtils.closeQuietly(apq); - } - } - - - public class ValueMapVisitor extends QueryModelVisitorBase { - - Map valMap = Maps.newHashMap(); - - - public Map getValMap() { - return valMap; - } - - @Override - public void meet(Var node) { - if (node.getName().startsWith("-const-")) { - valMap.put(node.getName(), node.getValue()); - } - - } - - } - - -} - diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java deleted file mode 100644 index 0e2096d50..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java +++ /dev/null @@ -1,213 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.impl.ExternalSet; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.beust.jcommander.internal.Sets; -import com.google.common.base.Joiner; -import com.google.common.collect.Maps; - -/** - * Abstract class for an External Tuple Set. This Tuple - */ -public abstract class ExternalTupleSet extends ExternalSet { - - private Projection tupleExpr; - private Map tableVarMap = Maps.newHashMap(); - private Map> supportedVarOrders = Maps.newHashMap(); - - - public ExternalTupleSet() { - - } - - public ExternalTupleSet(Projection tupleExpr) { - this.tupleExpr = tupleExpr; - } - - @Override - abstract public CloseableIteration evaluate(BindingSet bindings) throws QueryEvaluationException; - - @Override - public Set getBindingNames() { - return tupleExpr.getBindingNames(); - } - - @Override - public Set getAssuredBindingNames() { - return tupleExpr.getAssuredBindingNames(); - } - - @Override - public String getSignature() { - return "(External Projection) " + Joiner.on(", ").join(tupleExpr.getProjectionElemList().getElements()).replaceAll("\\s+", " "); - } - - public Projection getTupleExpr() { - return tupleExpr; - } - - public void setProjectionExpr(Projection tupleExpr) { - this.tupleExpr = tupleExpr; - } - - - public void setTableVarMap(Map vars) { - this.tableVarMap = vars; - } - - - public Map getTableVarMap() { - return this.tableVarMap; - } - - - public void setSupportedVariableOrderMap(Map> varOrders) { - this.supportedVarOrders = varOrders; - } - - - public Map> getSupportedVariableOrderMap() { - return supportedVarOrders; - } - - - public void updateTupleExp(final Map oldToNewBindings) { - tupleExpr.visit(new QueryModelVisitorBase() { - @Override - public void meet(Var var) { - if (oldToNewBindings.containsKey(var)) { - var.replaceWith(oldToNewBindings.get(var)); - } - } - }); - } - - @Override - public ExternalSet clone() { - ExternalTupleSet clone = (ExternalTupleSet) super.clone(); - clone.tupleExpr = this.tupleExpr.clone(); - clone.tableVarMap = Maps.newHashMap(); - for(String s: this.tableVarMap.keySet()) { - clone.tableVarMap.put(s,this.tableVarMap.get(s)); - } - clone.supportedVarOrders = Maps.newHashMap(); - for(String s: this.supportedVarOrders.keySet()) { - clone.supportedVarOrders.put(s,this.supportedVarOrders.get(s)); - } - return clone; - } - - - public Map> getSupportedVariableOrders() { - - if (supportedVarOrders.size() != 0) { - return supportedVarOrders; - } else { - - Set varSet = Sets.newHashSet(); - String t = ""; - - for (String s : tupleExpr.getAssuredBindingNames()) { - if (t.length() == 0) { - t = s; - } else { - t = t + "\u0000" + s; - } - - varSet.add(s); - supportedVarOrders.put(t, new HashSet(varSet)); - - } - - return supportedVarOrders; - } - } - - - - - public boolean supportsBindingSet(Set bindingNames) { - - Map> varOrderMap = getSupportedVariableOrders(); - String bNames = ""; - - for (String s : tupleExpr.getAssuredBindingNames()) { - if (bindingNames.contains(s)) { - if(bNames.length() == 0) { - bNames = s; - } else { - bNames = bNames + "\u0000"+ s; - } - } - } - - return varOrderMap.containsKey(bNames); - } - - - - @Override - public boolean equals(Object other) { - - if 
(!(other instanceof ExternalTupleSet)) { - return false; - } else { - - ExternalTupleSet arg = (ExternalTupleSet) other; - if (this.getTupleExpr().equals(arg.getTupleExpr())) { - return true; - } else { - return false; - } - - } - - } - - - @Override - public int hashCode() { - int result = 17; - result = 31*result + tupleExpr.hashCode(); - - return result; - } - - - - - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java deleted file mode 100644 index 44925cad1..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java +++ /dev/null @@ -1,88 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelVisitor; - -import com.google.common.base.Joiner; - - - - - - -public class SimpleExternalTupleSet extends ExternalTupleSet { - - - - public SimpleExternalTupleSet(Projection tuple) { - super(); - this.setProjectionExpr(tuple); - - } - - @Override - public void visit(QueryModelVisitor visitor) - throws X - { - visitor.meetOther(this); - } - - @Override - public CloseableIteration evaluate(BindingSet bindings) - throws QueryEvaluationException { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getSignature() { - return "(SimpleExternalTupleSet) " - + Joiner.on(", ").join(this.getTupleExpr().getProjectionElemList().getElements()).replaceAll("\\s+", " "); - - } - - @Override - public boolean equals(Object other) { - - if (!(other instanceof SimpleExternalTupleSet)) { - return false; - } else { - - SimpleExternalTupleSet arg = (SimpleExternalTupleSet) other; - if (this.getTupleExpr().equals(arg.getTupleExpr())) { - return true; - } else { - return false; - } - - } - - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java deleted file mode 100644 index 4a708abb6..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java +++ /dev/null @@ -1,73 +0,0 @@ -package mvm.rya.indexing.mongodb; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.IOException; -import java.util.Collection; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.index.RyaSecondaryIndexer; - -import org.apache.hadoop.conf.Configuration; - -public abstract class AbstractMongoIndexer implements RyaSecondaryIndexer { - - @Override - public void close() throws IOException { - } - - @Override - public void flush() throws IOException { - } - - - @Override - public Configuration getConf() { - return null; - } - - - @Override - public String getTableName() { - return null; - } - - @Override - public void storeStatements(Collection ryaStatements) - throws IOException { - for (RyaStatement ryaStatement : ryaStatements){ - storeStatement(ryaStatement); - } - - } - - @Override - public void deleteStatement(RyaStatement stmt) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public void dropGraph(RyaURI... graphs) { - throw new UnsupportedOperationException(); - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java deleted file mode 100644 index 0355225af..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java +++ /dev/null @@ -1,185 +0,0 @@ -package mvm.rya.indexing.mongodb; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-import mvm.rya.indexing.accumulo.geo.GeoParseUtils;
-
-import org.apache.commons.codec.binary.Hex;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-
-import com.mongodb.BasicDBList;
-import com.mongodb.BasicDBObject;
-import com.mongodb.DBCollection;
-import com.mongodb.DBObject;
-import com.vividsolutions.jts.geom.Coordinate;
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.io.ParseException;
-import com.vividsolutions.jts.io.WKTReader;
-
-public class GeoMongoDBStorageStrategy {
-
- private static final String ID = "_id";
- private static final String GEO = "location";
- private static final String CONTEXT = "context";
- private static final String PREDICATE = "predicate";
- private static final String OBJECT = "object";
- private static final String SUBJECT = "subject";
- public enum GeoQueryType {
- INTERSECTS {
- public String getKeyword() {
- return "$geoIntersects";
- }
- }, WITHIN {
- public String getKeyword() {
- return "$geoWithin";
- }
- },
- EQUALS {
- public String getKeyword() {
- return "$near";
- }
- };
-
- public abstract String getKeyword();
- }
-
- private double maxDistance;
-
-
- public GeoMongoDBStorageStrategy(double maxDistance) {
- this.maxDistance = maxDistance;
- }
-
- public void createIndices(DBCollection coll){
- // the 2dsphere index spec must be a DBObject, not a raw JSON fragment
- coll.createIndex(new BasicDBObject(GEO, "2dsphere"));
- }
-
- public DBObject getQuery(StatementContraints contraints, Geometry geo, GeoQueryType queryType) {
- BasicDBObject query;
- if (queryType.equals(GeoQueryType.EQUALS)){
- List<double[]> points = getCorrespondingPoints(geo);
- if (points.size() == 1){
- List circle = new ArrayList();
- circle.add(points.get(0));
- circle.add(maxDistance);
- BasicDBObject polygon = new BasicDBObject("$centerSphere", circle);
- query = new BasicDBObject(GEO, new BasicDBObject(GeoQueryType.WITHIN.getKeyword(), polygon));
- } else {
- query = new BasicDBObject(GEO, points);
- }
- } else {
- query = new BasicDBObject(GEO, new BasicDBObject(queryType.getKeyword(), new BasicDBObject("$polygon", getCorrespondingPoints(geo))));
- }
- if (contraints.hasSubject()){
- query.append(SUBJECT, contraints.getSubject().toString());
- }
- if (contraints.hasPredicates()){
- Set<URI> predicates = contraints.getPredicates();
- if (predicates.size() > 1){
- BasicDBList or = new BasicDBList();
- for (URI pred : predicates){
- DBObject currentPred = new BasicDBObject(PREDICATE, pred.toString());
- or.add(currentPred);
- }
- query.append("$or", or);
- } else if (!predicates.isEmpty()){
- query.append(PREDICATE, predicates.iterator().next().toString());
- }
- }
- if (contraints.hasContext()){
- query.append(CONTEXT, contraints.getContext().toString());
- }
-
- return query;
- }
-
-
- public Statement deserializeDBObject(DBObject queryResult) {
- Map result = queryResult.toMap();
- String subject = (String) result.get(SUBJECT);
- String object = (String) result.get(OBJECT);
- String predicate = (String) result.get(PREDICATE);
- String context = (String) result.get(CONTEXT);
- if (!context.isEmpty()){
- return StatementSerializer.readStatement(subject, predicate, object, context);
- }
- return StatementSerializer.readStatement(subject, predicate, object);
- }
-
-
-
- public DBObject serialize(Statement statement)
throws ParseException{ - // if the object is wkt, then try to index it - // write the statement data to the fields - Geometry geo = (new WKTReader()).read(GeoParseUtils.getWellKnownText(statement)); - if(geo == null || geo.isEmpty() || !geo.isValid()) { - throw new ParseException("Could not create geometry for statement " + statement); - } - - String context = ""; - if (statement.getContext() != null){ - context = StatementSerializer.writeContext(statement); - } - String id = StatementSerializer.writeSubject(statement) + " " + - StatementSerializer.writePredicate(statement) + " " + StatementSerializer.writeObject(statement) + " " + context; - byte[] bytes = id.getBytes(); - try { - MessageDigest digest = MessageDigest.getInstance("SHA-1"); - bytes = digest.digest(bytes); - } catch (NoSuchAlgorithmException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes))) - .append(GEO, getCorrespondingPoints(geo)) - .append(SUBJECT, StatementSerializer.writeSubject(statement)) - .append(PREDICATE, StatementSerializer.writePredicate(statement)) - .append(OBJECT, StatementSerializer.writeObject(statement)) - .append(CONTEXT, context); - return doc; - - } - - private List getCorrespondingPoints(Geometry geo){ - List points = new ArrayList(); - for (Coordinate coord : geo.getCoordinates()){ - points.add(new double[] { - coord.x, coord.y - }); - } - return points; - - } - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java deleted file mode 100644 index c36b125cd..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java +++ /dev/null @@ -1,259 +0,0 @@ -package mvm.rya.indexing.mongodb; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.net.UnknownHostException; -import java.util.Arrays; -import java.util.Set; - -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.mongodb.GeoMongoDBStorageStrategy.GeoQueryType; -import mvm.rya.mongodb.MongoDBRdfConfiguration; - -import org.apache.hadoop.conf.Configuration; -import org.apache.log4j.Logger; -import org.openrdf.model.Literal; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.QueryEvaluationException; - -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; -import com.mongodb.MongoClient; -import com.mongodb.MongoCredential; -import com.mongodb.ServerAddress; -import com.vividsolutions.jts.geom.Geometry; - -public class MongoGeoIndexer extends AbstractMongoIndexer implements GeoIndexer{ - - private static final Logger logger = Logger.getLogger(MongoGeoIndexer.class); - - private GeoMongoDBStorageStrategy storageStrategy; - private MongoClient mongoClient; - private DB db; - private DBCollection coll; - private Set predicates; - private Configuration conf; - private boolean isInit = false; - private String tableName = ""; - - - - private void init() throws NumberFormatException, UnknownHostException{ - ServerAddress server = new ServerAddress(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE), - Integer.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT))); - this.conf = conf; - if (conf.get(MongoDBRdfConfiguration.MONGO_USER) != null){ - MongoCredential cred = MongoCredential.createCredential(conf.get(MongoDBRdfConfiguration.MONGO_USER), conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD), - conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME).toCharArray()); - mongoClient = new MongoClient(server, Arrays.asList(cred)); - } - else { - mongoClient = new MongoClient(server); - } - predicates = ConfigUtils.getGeoPredicates(conf); - tableName = conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME); - db = mongoClient.getDB(tableName); - coll = db.getCollection(conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya") + "_geo"); - storageStrategy = new GeoMongoDBStorageStrategy(Double.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_GEO_MAXDISTANCE, "1e-10"))); - } - - - @Override - public String getTableName() { - return tableName; - } - - @Override - public Configuration getConf() { - return conf; - } - - //setConf initializes because index is created via reflection - @Override - public void setConf(Configuration conf) { - this.conf = conf; - if (!isInit) { - try { - init(); - isInit = true; - } catch (NumberFormatException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. ", e); - throw new RuntimeException(e); - } catch (UnknownHostException e) { - logger.warn("Unable to initialize index. Throwing Runtime Exception. 
", e); - throw new RuntimeException(e); - } - } - } - - - - private void storeStatement(Statement statement) throws IOException { - // if this is a valid predicate and a valid geometry - boolean isValidPredicate = predicates.isEmpty() || predicates.contains(statement.getPredicate()); - - if (isValidPredicate && (statement.getObject() instanceof Literal)) { - - // add it to the collection - try { - DBObject obj = storageStrategy.serialize(statement); - if (obj != null){ - coll.insert(obj); - } - } - catch (com.mongodb.MongoException.DuplicateKey exception){ - // ignore - } - catch (com.mongodb.DuplicateKeyException exception){ - // ignore - } - catch (Exception ex){ - // ignore single exceptions - ex.printStackTrace(); - } - } - } - - - @Override - public void storeStatement(RyaStatement statement) throws IOException { - storeStatement(RyaToRdfConversions.convertStatement(statement)); - } - - - - - @Override - public CloseableIteration queryEquals( - Geometry query, StatementContraints contraints) { - DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.EQUALS); - return getIteratorWrapper(queryObj, coll, storageStrategy); - } - - @Override - public CloseableIteration queryDisjoint( - Geometry query, StatementContraints contraints) { - throw new UnsupportedOperationException("Disjoint queries are not supported in Mongo DB."); - } - - @Override - public CloseableIteration queryIntersects( - Geometry query, StatementContraints contraints) { - DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.INTERSECTS); - return getIteratorWrapper(queryObj, coll, storageStrategy); - } - - @Override - public CloseableIteration queryTouches( - Geometry query, StatementContraints contraints) { - throw new UnsupportedOperationException("Touches queries are not supported in Mongo DB."); - } - - @Override - public CloseableIteration queryCrosses( - Geometry query, StatementContraints contraints) { - throw new UnsupportedOperationException("Crosses queries are not supported in Mongo DB."); - } - - @Override - public CloseableIteration queryWithin( - Geometry query, StatementContraints contraints) { - DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.WITHIN); - return getIteratorWrapper(queryObj, coll, storageStrategy); - } - - - private CloseableIteration getIteratorWrapper(final DBObject query, final DBCollection coll, final GeoMongoDBStorageStrategy storageStrategy) { - - return new CloseableIteration() { - - private DBCursor cursor = null; - - private DBCursor getIterator() throws QueryEvaluationException { - if (cursor == null){ - cursor = coll.find(query); - } - return cursor; - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - return getIterator().hasNext(); - } - - @Override - public Statement next() throws QueryEvaluationException { - DBObject feature = getIterator().next(); - return storageStrategy.deserializeDBObject(feature); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Remove not implemented"); - } - - @Override - public void close() throws QueryEvaluationException { - getIterator().close(); - } - }; - } - - @Override - public CloseableIteration queryContains( - Geometry query, StatementContraints contraints) { - throw new UnsupportedOperationException("Contains queries are not supported in Mongo DB."); - } - - @Override - public CloseableIteration queryOverlaps( - Geometry query, StatementContraints contraints) { - throw new 
UnsupportedOperationException("Overlaps queries are not supported in Mongo DB."); - } - - @Override - public Set getIndexablePredicates() { - return predicates; - } - - @Override - public void flush() throws IOException { - // TODO Auto-generated method stub - - } - - @Override - public void close() throws IOException { - mongoClient.close(); - } - - -} diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java deleted file mode 100644 index da49904dd..000000000 --- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java +++ /dev/null @@ -1,361 +0,0 @@ -package mvm.rya.indexing.mongodb; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Map; -import java.util.Set; - -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.IndexingExpr; -import mvm.rya.indexing.IteratorFactory; -import mvm.rya.indexing.SearchFunction; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.geo.GeoConstants; -import mvm.rya.indexing.accumulo.geo.GeoTupleSet; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.hadoop.conf.Configuration; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; - -import com.google.common.base.Joiner; -import com.google.common.collect.Maps; -import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.io.ParseException; -import com.vividsolutions.jts.io.WKTReader; - -public class MongoGeoTupleSet extends ExternalTupleSet { - - private Configuration conf; - private GeoIndexer geoIndexer; - private IndexingExpr filterInfo; - - - public MongoGeoTupleSet(IndexingExpr filterInfo, GeoIndexer geoIndexer) { - this.filterInfo = filterInfo; - this.geoIndexer = geoIndexer; - this.conf = geoIndexer.getConf(); - } - - @Override - public Set getBindingNames() { - return filterInfo.getBindingNames(); - } - - public GeoTupleSet clone() { - return new GeoTupleSet(filterInfo, geoIndexer); - } - - @Override - public double cardinality() { - return 0.0; // No idea how the estimate cardinality here. 
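
Every supported query method in the indexer above funnels through getIteratorWrapper, which adapts a lazily opened DBCursor to the CloseableIteration contract: the find() is issued only once results are first pulled, and close() releases the cursor. A stand-in sketch of that pattern with the aduna dependency removed; the interface and class names here are invented for illustration:

    import java.util.Iterator;

    // Stand-in for info.aduna.iteration.CloseableIteration, reduced to the
    // three operations the wrapper above actually exercises.
    interface CloseableIterationSketch<T> extends AutoCloseable {
        boolean hasNext();
        T next();
        @Override
        void close();
    }

    final class LazyCursorIteration<T> implements CloseableIterationSketch<T> {

        private final Iterable<T> source; // stands in for coll.find(query)
        private Iterator<T> cursor;       // opened on first use, like the DBCursor above

        LazyCursorIteration(Iterable<T> source) {
            this.source = source;
        }

        private Iterator<T> cursor() {
            if (cursor == null) {
                cursor = source.iterator(); // the query runs only when results are pulled
            }
            return cursor;
        }

        @Override
        public boolean hasNext() {
            return cursor().hasNext();
        }

        @Override
        public T next() {
            return cursor().next(); // deserializeDBObject(...) would wrap this value
        }

        @Override
        public void close() {
            // a real DBCursor must be closed here; a plain Iterator has nothing to release
        }
    }

The remaining spatial relations (disjoint, touches, crosses, contains, overlaps) simply throw UnsupportedOperationException above rather than emulating operators the Mongo geo index does not provide.
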
- } - - - @Override - public String getSignature() { - return "(GeoTuple Projection) " + "variables: " + Joiner.on(", ").join(this.getBindingNames()).replaceAll("\\s+", " "); - } - - - - @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } - if (!(other instanceof MongoGeoTupleSet)) { - return false; - } - MongoGeoTupleSet arg = (MongoGeoTupleSet) other; - return this.filterInfo.equals(arg.filterInfo); - } - - @Override - public int hashCode() { - int result = 17; - result = 31*result + filterInfo.hashCode(); - - return result; - } - - - - /** - * Returns an iterator over the result set of the contained IndexingExpr. - *
- * Should be thread-safe (concurrent invocation {@link OfflineIterable} this - * method can be expected with some query evaluators. - */ - @Override - public CloseableIteration evaluate(BindingSet bindings) - throws QueryEvaluationException { - - - URI funcURI = filterInfo.getFunction(); - SearchFunction searchFunction = (new MongoGeoSearchFunctionFactory(conf)).getSearchFunction(funcURI); - if(filterInfo.getArguments().length > 1) { - throw new IllegalArgumentException("Index functions do not support more than two arguments."); - } - - String queryText = filterInfo.getArguments()[0].stringValue(); - - return IteratorFactory.getIterator(filterInfo.getSpConstraint(), bindings, queryText, searchFunction); - } - - - - //returns appropriate search function for a given URI - //search functions used in GeoMesaGeoIndexer to access index - public class MongoGeoSearchFunctionFactory { - - Configuration conf; - - private final Map SEARCH_FUNCTION_MAP = Maps.newHashMap(); - - public MongoGeoSearchFunctionFactory(Configuration conf) { - this.conf = conf; - } - - - /** - * Get a {@link GeoSearchFunction} for a given URI. - * - * @param searchFunction - * @return - */ - public SearchFunction getSearchFunction(final URI searchFunction) { - - SearchFunction geoFunc = null; - - try { - geoFunc = getSearchFunctionInternal(searchFunction); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } - - return geoFunc; - } - - private SearchFunction getSearchFunctionInternal(final URI searchFunction) throws QueryEvaluationException { - SearchFunction sf = SEARCH_FUNCTION_MAP.get(searchFunction); - - if (sf != null) { - return sf; - } else { - throw new QueryEvaluationException("Unknown Search Function: " + searchFunction.stringValue()); - } - } - - private final SearchFunction GEO_EQUALS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_EQUALS"; - }; - }; - - private final SearchFunction GEO_DISJOINT = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_DISJOINT"; - }; - }; - - private final SearchFunction GEO_INTERSECTS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_INTERSECTS"; - }; - }; - - private final SearchFunction GEO_TOUCHES = new SearchFunction() { - - @Override - 
public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_TOUCHES"; - }; - }; - - private final SearchFunction GEO_CONTAINS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_CONTAINS"; - }; - }; - - private final SearchFunction GEO_OVERLAPS = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_OVERLAPS"; - }; - }; - - private final SearchFunction GEO_CROSSES = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_CROSSES"; - }; - }; - - private final SearchFunction GEO_WITHIN = new SearchFunction() { - - @Override - public CloseableIteration performSearch(String queryText, - StatementContraints contraints) throws QueryEvaluationException { - try { - WKTReader reader = new WKTReader(); - Geometry geometry = reader.read(queryText); - CloseableIteration statements = geoIndexer.queryWithin( - geometry, contraints); - return statements; - } catch (ParseException e) { - throw new QueryEvaluationException(e); - } - } - - @Override - public String toString() { - return "GEO_WITHIN"; - }; - }; - - { - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_EQUALS, GEO_EQUALS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_DISJOINT, GEO_DISJOINT); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_INTERSECTS, GEO_INTERSECTS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_TOUCHES, GEO_TOUCHES); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_CONTAINS, GEO_CONTAINS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_OVERLAPS, GEO_OVERLAPS); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_CROSSES, GEO_CROSSES); - SEARCH_FUNCTION_MAP.put(GeoConstants.GEO_SF_WITHIN, GEO_WITHIN); - } - - } - -} diff --git a/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java b/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java deleted file mode 100644 index 15151188b..000000000 --- 
a/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java +++ /dev/null @@ -1,507 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.indexing.IndexPlanValidator.IndexedExecutionPlanGenerator; -import mvm.rya.indexing.IndexPlanValidator.ValidIndexCombinationGenerator; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; - - -public class ValidIndexCombinationGeneratorTest { - - - - - - - @Test - public void singleIndex() { - String q1 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?f ?m ." // - + " ?m ?d ." // - + " ?f ?m ." // - + " ?m ?d ." // - + "}";// - - - - - - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - - - SimpleExternalTupleSet extTup1 = null; - - - - - - - try { - pq1 = parser.parseQuery(q1, null); - - - - extTup1 = new SimpleExternalTupleSet((Projection) pq1.getTupleExpr()); - - - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List indexList = Lists.newArrayList(); - indexList.add(extTup1); - - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> combos = vic.getValidIndexCombos(indexList); - int size = 0; - while(combos.hasNext()) { - combos.hasNext(); - size++; - combos.next(); - combos.hasNext(); - } - - Assert.assertTrue(!combos.hasNext()); - Assert.assertEquals(1,size); - - - } - - - - - - - @Test - public void medQueryEightOverlapIndex() { - String q1 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?f ?m ." // - + " ?m ?d ." // - + " ?f ?m ." // - + " ?m ?d ." // - + "}";// - - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q5 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." // - + " ?t ?u ." 
// - + "}";// - - String q6 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?t ?u ." // - + "}";// - - - - String q8 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." // - + "}";// - - - String q9 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - - - - - - - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - ParsedQuery pq7 = null; - ParsedQuery pq8 = null; - ParsedQuery pq9 = null; - - SimpleExternalTupleSet extTup1 = null; - SimpleExternalTupleSet extTup2 = null; - SimpleExternalTupleSet extTup3 = null; - SimpleExternalTupleSet extTup4 = null; - SimpleExternalTupleSet extTup5 = null; - SimpleExternalTupleSet extTup6 = null; - SimpleExternalTupleSet extTup7 = null; - SimpleExternalTupleSet extTup8 = null; - - - - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - pq6 = parser.parseQuery(q6, null); - pq7 = parser.parseQuery(q7, null); - pq8 = parser.parseQuery(q8, null); - pq9 = parser.parseQuery(q9, null); - - - extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - extTup7 = new SimpleExternalTupleSet((Projection) pq8.getTupleExpr()); - extTup8 = new SimpleExternalTupleSet((Projection) pq9.getTupleExpr()); - - - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List indexList = Lists.newArrayList(); - indexList.add(extTup1); - indexList.add(extTup2); - indexList.add(extTup3); - indexList.add(extTup4); - indexList.add(extTup5); - indexList.add(extTup6); - indexList.add(extTup7); - indexList.add(extTup8); - - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> combos = vic.getValidIndexCombos(indexList); - int size = 0; - while(combos.hasNext()) { - combos.hasNext(); - size++; - combos.next(); - combos.hasNext(); - } - - Assert.assertTrue(!combos.hasNext()); - Assert.assertEquals(21,size); - - - } - - - - - - @Test - public void largeQuerySixteenIndexTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q3 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . 
"// - + "}";// - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - - - Assert.assertEquals(16, indexSet.size()); - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> eSet = vic.getValidIndexCombos(Lists.newArrayList(indexSet)); - - int size = 0; - while(eSet.hasNext()) { - size++; - Assert.assertTrue(eSet.hasNext()); - eSet.next(); - } - - - Assert.assertTrue(!eSet.hasNext()); - Assert.assertEquals(75, size); - - } - - - - - - - @Test - public void largeQueryFourtyIndexTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q3 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + "}";// - - - - String q4 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g ?a ?b ?c" // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?c ?a . 
"// - + "}";// - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(40, indexSet.size()); - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> eSet = vic.getValidIndexCombos(Lists.newArrayList(indexSet)); - - int size = 0; - while(eSet.hasNext()) { - size++; - Assert.assertTrue(eSet.hasNext()); - eSet.next(); - } - - Assert.assertTrue(!eSet.hasNext()); - Assert.assertEquals(123, size); - } - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java b/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java deleted file mode 100644 index 8b76f7f6c..000000000 --- a/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java +++ /dev/null @@ -1,1903 +0,0 @@ -package mvm.rya.accumulo.documentIndex; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import junit.framework.Assert; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.RyaTableMutationsFactory; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.indexing.accumulo.entity.EntityCentricIndex; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.vocabulary.XMLSchema; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.repository.RepositoryException; - -import com.google.common.primitives.Bytes; - - -public class DocumentIndexIntersectingIteratorTest { - - - - private Connector accCon; - String tablename = "table"; - - - @Before - public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException { - - accCon = new MockInstance().getConnector("root", "".getBytes()); - accCon.tableOperations().create(tablename); - - } - - - - - - - -@Test - public void testBasicColumnObj() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - m.put(new Text("cf"), new Text(null + "\u0000" + "obj" + "\u0000" + "cq"), new Value(new byte[0])); - m.put(new Text("cF"), new Text(null + "\u0000" +"obj" + "\u0000" + "cQ"), new Value(new byte[0])); - - if (i == 30 || i == 60) { - m.put(new Text("CF"), new Text(null + "\u0000" +"obj" + "\u0000" + "CQ"), new Value(new byte[0])); - } - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf"), new Text("obj" + "\u0000" + "cq" )); - TextColumn tc2 = new TextColumn(new Text("cF"), new Text("obj" + "\u0000" + "cQ" )); - TextColumn tc3 = new TextColumn(new Text("CF"), new Text("obj" + "\u0000" + "CQ" )); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - IteratorSetting is = new IteratorSetting(30, "fii", 
DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 1****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(2, results); - - - - - } - - - - - - - -@Test - public void testBasicColumnObjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - m.put(new Text("cf"), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" ), new Value(new byte[0])); - m.put(new Text("cF"), new Text(null + "\u0000" +"obj" + "\u0000" + "cQ"), new Value(new byte[0])); - - if (i == 30 || i == 60) { - m.put(new Text("CF"), new Text(null + "\u0000" +"obj" + "\u0000" + "CQ" ), new Value(new byte[0])); - } - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf"), new Text("obj" + "\u0000" + "cq")); - TextColumn tc2 = new TextColumn(new Text("cF"), new Text("obj" + "\u0000" + "cQ")); - TextColumn tc3 = new TextColumn(new Text("CF"), new Text("obj")); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - tc3.setIsPrefix(true); - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 2****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(2, results); - - - - - } - - - - -@Test - public void testBasicColumnSubjObjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - m.put(new Text("cf"), new Text(null + "\u0000" +"obj" + "\u0000" + "cq"), new Value(new byte[0])); - m.put(new Text("cF"), new Text(null + "\u0000" +"obj" + "\u0000" + "cQ"), new Value(new byte[0])); - - if (i == 30 ) { - m.put(new Text("CF"), new Text(null + "\u0000" +"obj" + "\u0000" + "CQ"), new Value(new byte[0])); - } - - if (i == 60) { - m.put(new Text("CF"), new Text(null + "\u0000" +"subj" + "\u0000" + "CQ"), new Value(new byte[0])); - } - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf"), new Text("obj" + "\u0000" + "cq" )); - TextColumn tc2 = new TextColumn(new Text("cF"), new Text("obj" + "\u0000" + "cQ")); - TextColumn tc3 = new TextColumn(new Text("CF"), new Text("subj")); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - tc3.setIsPrefix(true); - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 3****************************"); - for (Map.Entry e : scan) { 
- System.out.println(e); - results++; - } - - - Assert.assertEquals(1, results); - - - - - } - - - - -@Test - public void testOneHundredColumnSubjObj() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf" + j), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j), new Value(new byte[0])); - } - - if (i == 30 ) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 100), new Value(new byte[0])); - } - - if (i == 60) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 100), new Value(new byte[0])); - } - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 20), new Text("obj" + "\u0000" + "cq" + 20)); - TextColumn tc2 = new TextColumn(new Text("cf" + 50), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" + 100), new Text("obj" + "\u0000" + "cq" + 100)); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 4****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(1, results); - - - - - } - - - - -@Test - public void testOneHundredColumnObjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf" + j), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j ), new Value(new byte[0])); - } - - if (i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + (100 + i)), new Value(new byte[0])); - } - - - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 20), new Text("obj" + "\u0000" + "cq" + 20)); - TextColumn tc2 = new TextColumn(new Text("cf" + 50), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" + 100), new Text("obj")); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - tc3.setIsPrefix(true); - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 5****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(4, results); - - - - - } - - - - - - - -@Test - public void testOneHundredColumnMultipleEntriesPerSubject() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - 
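
One convention runs through all of these iterator tests: each column qualifier is built as context \0 role \0 value, and because the ingest code concatenates null + "\u0000", the context slot literally holds the string "null" when no context is set. A TextColumn with setIsPrefix(true), such as the ("cf100", "subj") columns used here, matches any qualifier whose role field starts with the given prefix, whatever the context and value. A small sketch of that layout, assuming the match skips the context component (the helper names are illustrative, not from the iterator itself):

    public final class QualifierLayoutSketch {

        private static final char SEP = '\u0000';

        // Build a qualifier the way the mutations in these tests do:
        // context, role ("obj"/"subj"), and value, separated by NUL bytes.
        static String qualifier(String context, String role, String value) {
            return context + SEP + role + SEP + value;
        }

        // Prefix semantics as exercised by setIsPrefix(true): skip the
        // context field, then prefix-match the remainder.
        static boolean prefixMatches(String qualifier, String prefix) {
            int firstSep = qualifier.indexOf(SEP);
            return firstSep >= 0 && qualifier.substring(firstSep + 1).startsWith(prefix);
        }

        public static void main(String[] args) {
            String q = qualifier("null", "subj", "cq3"); // "null" mirrors null + "\u0000" above
            System.out.println(prefixMatches(q, "subj")); // true
            System.out.println(prefixMatches(q, "obj"));  // false
        }
    }
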
for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf" + j), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j ), new Value(new byte[0])); - } - - if (i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + (100 + i)), new Value(new byte[0])); - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + (100 + i + 1)), new Value(new byte[0])); - } - - - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 20), new Text("obj" + "\u0000" + "cq" + 20 )); - TextColumn tc2 = new TextColumn(new Text("cf" + 50), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" + 100), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 6****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(8, results); - - - - - } - - - - - -@Test -public void testOneHundredColumnSubjObjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf" + j), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j), new Value(new byte[0])); - } - - if (i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + (100 + i)), new Value(new byte[0])); - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + (100 + i + 1)), new Value(new byte[0])); - } - - - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 20), new Text("obj" + "\u0000" + "cq" + 20)); - TextColumn tc2 = new TextColumn(new Text("cf" + 50), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" + 100), new Text("subj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 7****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(4, results); - - - - -} - - - - - - -@Test -public void testOneHundredColumnSubjObjPrefixFourTerms() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf" 
+ j), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j), new Value(new byte[0])); - } - - if (i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + (100 + i)), new Value(new byte[0])); - m.put(new Text("cf" + 100), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + (100 + i + 1)), new Value(new byte[0])); - } - - - - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 20), new Text("obj" + "\u0000" + "cq" + 20)); - TextColumn tc2 = new TextColumn(new Text("cf" + 50), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" + 100), new Text("subj")); - TextColumn tc4 = new TextColumn(new Text("cf" + 100), new Text("obj")); - - tc3.setIsPrefix(true); - tc4.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[4]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - tc[3] = tc4; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 8****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(4, results); - - - - -} - - - - - - -//@Test -public void testOneHundredColumnSameCf() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - for(int j= 0; j < 100; j++) { - m.put(new Text("cf"), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + j), new Value(new byte[0])); - } - - - - bw.addMutation(m); - - } - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" ), new Text("obj" + "\u0000" + "cq" + 20)); - TextColumn tc2 = new TextColumn(new Text("cf"), new Text("obj" + "\u0000" + "cq" + 50)); - TextColumn tc3 = new TextColumn(new Text("cf" ), new Text("obj" + "\u0000" + "cq" + 80)); - TextColumn tc4 = new TextColumn(new Text("cf"), new Text("obj")); - - tc4.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[4]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - tc[3] = tc4; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 9****************************"); - for (Map.Entry e : scan) { - //System.out.println(e); - results++; - } - - - Assert.assertEquals(10000, results); - - - - -} - - - - - -@Test -public void testGeneralStarQuery() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 1 ), new 
Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - - - - if(i == 30 || i == 60 ) { - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - } - - bw.addMutation(m); - - } - - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 3)); - - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 10****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(2, results); - - - - -} - - - - - - - -@Test -public void testGeneralStarQuerySubjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - } - - bw.addMutation(m); - - } - - - DocumentIndexIntersectingIterator dii = new DocumentIndexIntersectingIterator(); - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("subj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - dii.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 11****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(4, results); - - - - -} - - - - - -@Test -public void testGeneralStarQueryMultipleSubjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 
100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 4), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 4), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 5), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 5), new Value(new byte[0])); - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("subj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 12****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(12, results); - - - - -} - - - - -@Test -public void testFixedRangeColumnValidateExact() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 4), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 4), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 5), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 5), new 
Value(new byte[0])); - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 3)); - TextColumn tc4 = new TextColumn(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 4)); - TextColumn tc5 = new TextColumn(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 5)); - - - - TextColumn[] tc = new TextColumn[5]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - tc[3] = tc4; - tc[4] = tc5; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.setRange(Range.exact(new Text("" + 30))); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 14****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(1, results); - - - - -} - - - - - - -@Test -public void testLubmLikeTest() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m1 = new Mutation(new Text("ProfessorA" + i)); - Mutation m2= new Mutation(new Text("ProfessorB" + i)); - - m1.put(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#doctoralDegreeFrom"), - new Text(null + "\u0000" +"object" + "\u0000" + "http://www.University" + i + ".edu"), new Value(new byte[0])); - m2.put(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#doctoralDegreeFrom"), - new Text(null + "\u0000" +"object" + "\u0000" + "http://www.University" + i + ".edu"), new Value(new byte[0])); - m1.put(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#teacherOf"), - new Text(null + "\u0000" +"object" + "\u0000" + "http://Course" + i), new Value(new byte[0])); - m2.put(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#teacherOf"), - new Text(null + "\u0000" +"object" + "\u0000" + "http://Course" + i), new Value(new byte[0])); - - - bw.addMutation(m1); - bw.addMutation(m2); - - } - - - - TextColumn tc1 = new TextColumn(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#doctoralDegreeFrom" ), - new Text("object" + "\u0000" + "http://www.University" + 30 + ".edu")); - TextColumn tc2 = new TextColumn(new Text("http://swat.cse.lehigh.edu/onto/univ-bench.owl#teacherOf"), - new Text("object" + "\u0000" + "http://Course" + 30)); - - - - - TextColumn[] tc = new TextColumn[2]; - tc[0] = tc1; - tc[1] = tc2; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 15****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(2, results); - - - - -} - - - - - - - - - - - - - - - - - -@Test -public void testFixedRangeColumnValidateSubjPrefix() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; 
i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 4), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 4 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"obj" + "\u0000" + "cq" + 5 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" +"subj" + "\u0000" + "cq" + 5 ), new Value(new byte[0])); - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("subj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - scan.setRange(Range.exact(new Text("" + 30))); - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 13****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(3, results); - - - - -} - - - - - -//@Test -//public void testRangeBound() { -// -// BatchWriter bw = null; -// -// try { -// -// -// -// -// for (int i = 0; i < 100; i++) { -// -// Mutation m = new Mutation(new Text("" + i)); -// -// m.put(new Text("cf" + 1), new Text("obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); -// m.put(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); -// m.put(new Text("cf" + 1), new Text("subj" + "\u0000" + "cq" + 1), new Value(new byte[0])); -// m.put(new Text("cf" + 2), new Text("subj" + "\u0000" + "cq" + 2), new Value(new byte[0])); -// -// -// -// if(i == 30 || i == 60 || i == 90 || i == 99) { -// m.put(new Text("cf" + 3), new Text("obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); -// m.put(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 3), new Value(new byte[0])); -// m.put(new Text("cf" + 3), new Text("obj" + "\u0000" + "cq" + 4), new Value(new byte[0])); -// m.put(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 4), new Value(new byte[0])); -// m.put(new Text("cf" + 3), new Text("obj" + "\u0000" + "cq" + 5), new Value(new byte[0])); -// m.put(new Text("cf" + 3), new Text("subj" + "\u0000" + "cq" + 5), new Value(new byte[0])); -// } -// -// bw.addMutation(m); -// -// } -// -// -// -// Text cf = new Text("cf" + 3); -// Text cq = new Text("obj" + "\u0000" + "cq" + 3); -// -// Scanner 
scan = accCon.createScanner(tablename, new Authorizations("auths")); -// scan.fetchColumn(cf, cq ); -// scan.setRange(new Range()); -// -// -// int results = 0; -// System.out.println("************************Test 14****************************"); -// for (Map.Entry e : scan) { -// System.out.println(e); -// results++; -// } -// -// -// -// -// -// -// } catch (MutationsRejectedException e) { -// // TODO Auto-generated catch block -// e.printStackTrace(); -// } catch (TableNotFoundException e) { -// // TODO Auto-generated catch block -// e.printStackTrace(); -// } -// -//} - - - - - -@Test -public void testContext1() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 4 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 5 ), new Value(new byte[0])); - - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - DocumentIndexIntersectingIterator.setContext(is, "context1"); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 14****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(8, results); - - - - -} - - - - - - - -@Test -public void testContext2() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 4 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context3" + "\u0000" +"obj" + "\u0000" + "cq" + 5 ), new Value(new byte[0])); - - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new 
TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - DocumentIndexIntersectingIterator.setContext(is, "context2"); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 15****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(0, results); - - - - -} - - - - - - - - -@Test -public void testContext3() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 1 + "\u0000" + "context1"), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 2 + "\u0000" + "context1"), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 1 + "\u0000" + "context2"), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 2 + "\u0000" + "context2"), new Value(new byte[0])); - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 4 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context3" + "\u0000" +"obj" + "\u0000" + "cq" + 5 ), new Value(new byte[0])); - - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - DocumentIndexIntersectingIterator.setContext(is, "context2"); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 16****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(4, results); - - - - -} - - - - - - - - - -@Test -public void testContext4() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), 
new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context2" + "\u0000" +"obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - - - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 17****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(8, results); - - - - - -} - - - - - -@Test -public void testContext5() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("" + i)); - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + 1), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + 2), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text(null + "\u0000" + "obj" + "\u0000" + "cq" + 1 ), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text(null + "\u0000" + "obj" + "\u0000" + "cq" + 2 ), new Value(new byte[0])); - - - if(i == 30 || i == 60 || i == 90 || i == 99) { - m.put(new Text("cf" + 3), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + 3), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - m.put(new Text("cf" + 3), new Text(null + "\u0000" + "obj" + "\u0000" + "cq" + 3 ), new Value(new byte[0])); - - - } - - bw.addMutation(m); - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" + "\u0000" + "cq" + 1)); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" + "\u0000" + "cq" + 2)); - TextColumn tc3 = new TextColumn(new Text("cf" + 3), new Text("obj")); - - tc3.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - 
scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 18****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(12, results); - - - - -} - - - - - - -@Test -public void testContext6() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 100; i++) { - - Mutation m = new Mutation(new Text("row" + i)); - - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" + "subj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" + "subj" + "\u0000" + "cq" + i), new Value(new byte[0])); - - - bw.addMutation(m); - - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" )); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("subj" )); - - - tc1.setIsPrefix(true); - tc2.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[2]; - tc[0] = tc1; - tc[1] = tc2; - - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - DocumentIndexIntersectingIterator.setContext(is, "context2"); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 19****************************"); - for (Map.Entry e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(100, results); - - - - -} - - - -@Test -public void testContext7() throws Exception { - - BatchWriter bw = null; - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 10; i++) { - - Mutation m = new Mutation(new Text("row" + i)); - - - m.put(new Text("cf" + 1), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context1" + "\u0000" + "obj" + "\u0000" + "cq" + 100 + i), new Value(new byte[0])); - m.put(new Text("cf" + 1), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + i), new Value(new byte[0])); - m.put(new Text("cf" + 2), new Text("context2" + "\u0000" + "obj" + "\u0000" + "cq" + 100+i), new Value(new byte[0])); - - - bw.addMutation(m); - - - } - - - - TextColumn tc1 = new TextColumn(new Text("cf" + 1 ), new Text("obj" )); - TextColumn tc2 = new TextColumn(new Text("cf" + 2), new Text("obj" )); - - - tc1.setIsPrefix(true); - tc2.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[2]; - tc[0] = tc1; - tc[1] = tc2; - - - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 20****************************"); - 
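// Accounting for the 40 expected below, based on the writes above: both
// TextColumns are prefix columns on "obj", and every one of the 10 rows
// holds one matching cf1 qualifier plus two matching cf2 qualifiers
// ("cq" + i and "cq" + 100 + i) in each of its two contexts. One cf1
// entry crossed with two cf2 entries yields two intersections per
// context, so each row produces 4 results and the scan returns 10 * 4 = 40.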
for (Map.Entry&lt;Key, Value&gt; e : scan) { - System.out.println(e); - results++; - } - - - Assert.assertEquals(40, results); - - - - -} - - - - - - - -@Test -public void testSerialization1() throws Exception { - - BatchWriter bw = null; - AccumuloRdfConfiguration acc = new AccumuloRdfConfiguration(); - acc.set(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS, EntityCentricIndex.class.getName()); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(acc)); - - bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 20; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - RyaStatement rs4 = null; - - if(i == 5 || i == 15) { - rs3 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i))); - rs4 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.STRING, Integer.toString(i))); - } - - - - Collection&lt;Mutation&gt; m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection&lt;Mutation&gt; m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (rs3 != null) { - Collection&lt;Mutation&gt; m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - if (rs4 != null) { - Collection&lt;Mutation&gt; m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X &lt;uri:cf1&gt; ?Y1 ."// - + "?X &lt;uri:cf2&gt; ?Y2 ."// - + "?X &lt;uri:cf3&gt; 5 ."// - + "}"; - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X &lt;uri:cf1&gt; ?Y1 ."// - + "?X &lt;uri:cf2&gt; ?Y2 ."// - + "?X &lt;uri:cf3&gt; \"15\" ."// - + "}"; - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te1 = pq1.getTupleExpr(); - TupleExpr te2 = pq2.getTupleExpr(); - - List&lt;StatementPattern&gt; spList1 = StatementPatternCollector.process(te1); - List&lt;StatementPattern&gt; spList2 = StatementPatternCollector.process(te2); - - System.out.println(spList1); - System.out.println(spList2); - - RyaType rt1 = RdfToRyaConversions.convertValue(spList1.get(2).getObjectVar().getValue()); - RyaType rt2 = RdfToRyaConversions.convertValue(spList2.get(2).getObjectVar().getValue()); - - RyaURI predURI1 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(0).getPredicateVar().getValue()); - RyaURI predURI2 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(1).getPredicateVar().getValue()); - RyaURI predURI3 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(2).getPredicateVar().getValue()); - -// System.out.println("to string" + spList1.get(2).getObjectVar().getValue().stringValue()); -// System.out.println("converted obj" + rt1.getData()); -// System.out.println("equal: " + rt1.getData().equals(spList1.get(2).getObjectVar().getValue().stringValue())); - - - System.out.println(rt1); - System.out.println(rt2); - - RyaContext rc = RyaContext.getInstance(); - - byte[][] b1 = rc.serializeType(rt1); - byte[][] b2 = rc.serializeType(rt2); - - byte[] b3 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b1[0], b1[1]); - byte[] b4 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b2[0], b2[1]); - - System.out.println(new String(b3)); -
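// Layout of the qualifier bytes just assembled, as a sketch inferred from
// the serializeType() call above: b1[0]/b2[0] hold the serialized value
// and b1[1]/b2[1] its type marker, so b3 and b4 take the form
// "object" + '\u0000' + data bytes + type-marker bytes. The TextColumns
// built from b3 and b4 below are deliberately not prefix columns, so the
// intersecting iterator matches only documents whose uri:cf3 entry holds
// exactly that typed value: the integer 5 for b3, the string "15" for b4.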
System.out.println(new String(b4)); - - TextColumn tc1 = new TextColumn(new Text(predURI1.getData()), new Text("object")); - TextColumn tc2 = new TextColumn(new Text(predURI2.getData()), new Text("object")); - TextColumn tc3 = new TextColumn(new Text(predURI3.getData()), new Text(b3)); - - tc1.setIsPrefix(true); - tc2.setIsPrefix(true); - - TextColumn[] tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - Scanner scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - int results = 0; - System.out.println("************************Test 21****************************"); - Text t = null; - for (Map.Entry&lt;Key, Value&gt; e : scan) { - t = e.getKey().getColumnQualifier(); - System.out.println(e); - results++; - } - - Assert.assertEquals(1, results); - String[] s = t.toString().split("\u001D" + "\u001E"); - String[] s1 = s[2].split("\u0000"); - RyaType rt = rc.deserialize(s1[2].getBytes()); - System.out.println("Rya type is " + rt); - org.openrdf.model.Value v = RyaToRdfConversions.convertValue(rt); - Assert.assertTrue(v.equals(spList1.get(2).getObjectVar().getValue())); - - tc1 = new TextColumn(new Text(predURI1.getData()), new Text("object")); - tc2 = new TextColumn(new Text(predURI2.getData()), new Text("object")); - tc3 = new TextColumn(new Text(predURI3.getData()), new Text(b4)); - - tc1.setIsPrefix(true); - tc2.setIsPrefix(true); - - tc = new TextColumn[3]; - tc[0] = tc1; - tc[1] = tc2; - tc[2] = tc3; - - is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class); - - DocumentIndexIntersectingIterator.setColumnFamilies(is, tc); - - scan = accCon.createScanner(tablename, new Authorizations("auths")); - - scan.addScanIterator(is); - - results = 0; - System.out.println("************************Test 21****************************"); - - for (Map.Entry&lt;Key, Value&gt; e : scan) { - t = e.getKey().getColumnQualifier(); - System.out.println(e); - results++; - } - - Assert.assertEquals(1, results); - s = t.toString().split("\u001D" + "\u001E"); - s1 = s[2].split("\u0000"); - rt = rc.deserialize(s1[2].getBytes()); - System.out.println("Rya type is " + rt); - v = RyaToRdfConversions.convertValue(rt); - Assert.assertTrue(v.equals(spList2.get(2).getObjectVar().getValue())); - - - - -} - - - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java deleted file mode 100644 index bfea0bd25..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java +++ /dev/null @@ -1,325 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public class GeneralizedExternalProcessorTest { - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - private String q8 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + "}";// - - - - - private String q11 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + "}";// - - - private String q12 = ""// - + "SELECT ?b ?p ?dog ?cat " // - + "{" // - + " ?b a ?p ."// - + " ?dog a ?cat. "// - + "}";// - - - - private String q13 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit ?dick ?jane ?betty " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + " ?dick ?jane . "// - + " ?jane ?betty . "// - + "}";// - - private String q14 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + " ?d ?a . "// - + "}";// - - - private String q15 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - private String q16 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?d ?f . "// - + " ?c ?e . 
"// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - private String q17 = ""// - + "SELECT ?dog ?cat ?chicken " // - + "{" // - + " ?chicken ?dog . "// - + " ?cat ?chicken ."// - + "}";// - - private String q18 = ""// - + "SELECT ?dog ?chicken " // - + "{" // - + " ?chicken ?dog . "// - + "}";// - - private String q19 = ""// - + "SELECT ?cat ?chicken " // - + "{" // - + " ?cat ?chicken ."// - + "}";// - - - - - - - - - - //@Test - public void testTwoIndexLargeQuery() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q15, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q12, null); - - - - System.out.println("Query is " + pq1.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - //SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq5.getTupleExpr())); - - - List list = new ArrayList(); - - list.add(extTup2); - //list.add(extTup3); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(),list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(4, indexSet.size()); - -// System.out.println("Normalized indices are: "); -// for(ExternalTupleSet e: indexSet) { -// System.out.println(e.getTupleExpr()); -// } - - Set processedTups = Sets.newHashSet(iep.getIndexedTuples()); - - Assert.assertEquals(5, processedTups.size()); - - // System.out.println("Size is " + processedTups.size()); - -// System.out.println("Indexed tuples are :" ); -// for(TupleExpr te: processedTups) { -// System.out.println(te); -// } - - - - - - - } - - - - - - @Test - public void testThreeIndexQuery() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q16, null); - ParsedQuery pq2 = parser.parseQuery(q17, null); - ParsedQuery pq3 = parser.parseQuery(q18, null); - ParsedQuery pq4 = parser.parseQuery(q19, null); - - - - System.out.println("Query is " + pq1.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup3); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(),list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(6, indexSet.size()); - -// System.out.println("Normalized indices are: "); -// for(ExternalTupleSet e: indexSet) { -// System.out.println(e.getTupleExpr()); -// } - - Set processedTups = Sets.newHashSet(iep.getIndexedTuples()); - - Assert.assertEquals(17, processedTups.size()); - - // System.out.println("Size is " + processedTups.size()); - -// System.out.println("Indexed tuples are :" ); -// for(TupleExpr te: processedTups) { -// System.out.println(te); -// } - - - TupleExecutionPlanGenerator tep = new TupleExecutionPlanGenerator(); - List plans = Lists.newArrayList(tep.getPlans(processedTups.iterator())); - - - System.out.println("Size is " + plans.size()); - - System.out.println("Possible indexed tuple plans are :" ); - for(TupleExpr te: plans) { - System.out.println(te); - } - - - 
- - } - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java deleted file mode 100644 index eea5b95dc..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java +++ /dev/null @@ -1,1148 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.indexing.IndexPlanValidator.ThreshholdPlanSelectorTest.NodeCollector; -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; - -public class IndexPlanValidatorTest { - - - @Test - public void testEvaluateTwoIndexTwoVarOrder1() { - - System.out.println("********************Test number 1***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais1); - index.add(ais2); - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder2() { - - System.out.println("********************Test number 2***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais1); - index.add(ais2); - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder3() { - - - System.out.println("********************Test number 3***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?l ?e ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais1); - index.add(ais2); - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder4() { - - - System.out.println("********************Test number 4***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?c ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais1); - index.add(ais2); - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder5() { - - System.out.println("********************Test number 5***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?l ?o ?e " // - + "{" // - + " ?o ?l ."// - + " ?e ?o . "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr()); - - System.out.println("Supported variable orders are " + ais1.getSupportedVariableOrders() + ", " + ais2.getSupportedVariableOrders()); - - index.add(ais2); - index.add(ais1); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - System.out.println("query assured binding names are " + pq.getTupleExpr().getAssuredBindingNames()); - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder6() { - - - System.out.println("********************Test number 6***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?l ?e ?o " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais2); - index.add(ais1); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - - - @Test - public void testEvaluateTwoIndexCrossProduct1() { - - System.out.println("********************Test number 7***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + " ?o ?l . "// - + " ?f ?g . " // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - index.add(ais2); - index.add(ais1); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(true); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - - @Test - public void testEvaluateTwoIndexCrossProduct2() { - - System.out.println("********************Test number 8***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . " // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(true); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - @Test - public void testEvaluateTwoIndexCrossProduct3() { - - System.out.println("********************Test number 9***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . 
" // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - - - - - - - @Test - public void testEvaluateTwoIndexDiffVars() { - - System.out.println("********************Test number 10***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?chicken ?dog ?pig " // - + "{" // - + " ?dog a ?chicken . "// - + " ?dog ?pig "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?fish ?ant ?turkey " // - + "{" // - + " ?fish ?turkey . "// - + " ?turkey ?ant "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . " // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - @Test - public void testEvaluateTwoIndexDiffVars2() { - - System.out.println("********************Test number 11***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?chicken " // - + "{" // - + " ?dog a ?chicken . "// - + " ?dog ?pig "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?fish ?ant ?turkey " // - + "{" // - + " ?fish ?turkey . "// - + " ?turkey ?ant "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . 
" // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - @Test - public void testEvaluateTwoIndexDiffVars3() { - - System.out.println("********************Test number 11***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?pig ?dog ?chicken " // - + "{" // - + " ?dog a ?chicken . "// - + " ?dog ?pig "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?fish ?ant ?turkey " // - + "{" // - + " ?fish ?turkey . "// - + " ?turkey ?ant "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . " // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - Assert.assertEquals(true, ipv.isValid(tup)); - - } - - - - - @Test - public void testEvaluateTwoIndexDiffVarsDirProd() { - - System.out.println("********************Test number 12***************************"); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?pig ?dog ?chicken " // - + "{" // - + " ?dog a ?chicken . "// - + " ?dog ?pig "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?fish ?ant ?turkey " // - + "{" // - + " ?fish ?turkey . "// - + " ?turkey ?ant "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o ?f ?g " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l . "// - + " ?f ?g . 
" // - + "}";// - - - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery index1 = null; - ParsedQuery index2 = null; - try { - index1 = sp.parseQuery(indexSparqlString, null); - index2 = sp.parseQuery(indexSparqlString2, null); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - - List index = Lists.newArrayList(); - - SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection)index1.getTupleExpr()); - SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection)index2.getTupleExpr());; - - - index.add(ais1); - index.add(ais2); - - - ParsedQuery pq = null; - - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - TupleExpr tup = processor.process(pq.getTupleExpr()); - - System.out.println("TupleExpr is " + tup); - - IndexPlanValidator ipv = new IndexPlanValidator(true); - Assert.assertEquals(false, ipv.isValid(tup)); - - } - - - - @Test - public void testValidTupleIterator() throws Exception { - - System.out.println("********************Test number 13***************************"); - - String q1 = ""// - + "SELECT ?f ?m ?d ?h ?i " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?d ?f ." // - + " ?f ?h ." // - + " ?f ?i ." // - + " ?i ?h ." // - + "}";// - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - ParsedQuery pq3 = parser.parseQuery(q3, null); - ParsedQuery pq4 = parser.parseQuery(q4, null); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - IndexPlanValidator ipv = new IndexPlanValidator(true); - Iterator validPlans = ipv.getValidTuples(plans); - - int size = 0; - - while(validPlans.hasNext()) { - Assert.assertTrue(validPlans.hasNext()); - validPlans.next(); - size++; - } - - Assert.assertTrue(!validPlans.hasNext()); - Assert.assertEquals(732, size); - - - - } - - - - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java deleted file mode 100644 index 79a665630..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java +++ /dev/null @@ -1,423 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Set; -import junit.framework.Assert; -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; - -public class IndexedExecutionPlanGeneratorTest { - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - private String q12 = ""// - + "SELECT ?b ?p ?dog ?cat " // - + "{" // - + " ?b a ?p ."// - + " ?dog a ?cat. "// - + "}";// - - private String q15 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - private String q16 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?l ?c . "// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - private String q17 = ""// - + "SELECT ?dog ?cat ?chicken " // - + "{" // - + " ?chicken ?dog . "// - + " ?cat ?chicken ."// - + "}";// - - private String q18 = ""// - + "SELECT ?cat ?chicken ?pig ?duck " // - + "{" // - + " ?cat ?chicken. "// - + " ?pig ?duck . "// - + "}";// - - - - private String q19 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f ?m . "// - + " ?d ?e . "// - + " ?l ?c . "// - + "}";// - - private String q20 = ""// - + "SELECT ?f ?m " // - + "{" // - + " ?f ?m . "// - + "}";// - - - private String q21 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 3). " // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - private String q22 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " Filter(?f > 3) ."// - + " Filter(?e > 3) ."// - + " ?e a ?f ." // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - private String q23 = ""// - + "SELECT ?h ?i ?j " // - + "{" // - + " Filter(?h > 3) ."// - + " Filter(?i > 3) ."// - + " ?h a ?i ." 
// - + " ?h a ?j ."// - + "}";// - - - - - - - @Test - public void testTwoIndexLargeQuery() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q15, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q12, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(4, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - int size = 0; - - while (processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - processedTups.next(); - size++; - } - - Assert.assertTrue(!processedTups.hasNext()); - - Assert.assertEquals(5, size); - - } - - - - - - @Test - public void testThreeSingleNodeIndex() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q19, null); - ParsedQuery pq2 = parser.parseQuery(q20, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(3, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - int size = 0; - - while(processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - processedTups.next(); - size++; - } - Assert.assertTrue(!processedTups.hasNext()); - - Assert.assertEquals(3, size); - - } - - - - @Test - public void testThreeIndexQuery() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q16, null); - ParsedQuery pq2 = parser.parseQuery(q17, null); - ParsedQuery pq3 = parser.parseQuery(q18, null); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(),list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(6, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - int size = 0; - - while(processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - processedTups.next(); - size++; - } - - Assert.assertTrue(!processedTups.hasNext()); - Assert.assertEquals(9, size); - - - } - - - - - @Test - public void testThrowsException1() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q16, null); - ParsedQuery pq2 = parser.parseQuery(q17, null); - ParsedQuery pq3 = parser.parseQuery(q18, null); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new 
IndexedExecutionPlanGenerator(pq1.getTupleExpr(),list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(6, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - - boolean exceptionThrown = false; - - try{ - processedTups.remove(); - } catch(UnsupportedOperationException e) { - exceptionThrown = true; - } - - Assert.assertTrue(exceptionThrown); - - - } - - - @Test - public void testThrowsException2() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q19, null); - ParsedQuery pq2 = parser.parseQuery(q20, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(3, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - int size = 0; - - processedTups.next(); - processedTups.next(); - processedTups.next(); - - boolean exceptionThrown = false; - try { - processedTups.next(); - } catch (NoSuchElementException e) { - exceptionThrown = true; - } - - Assert.assertTrue(exceptionThrown); - - } - - - - - - @Test - public void testThreeIndexQueryFilter() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q22, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q21, null); - ParsedQuery pq4 = parser.parseQuery(q23, null); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(),list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(5, indexSet.size()); - - Iterator processedTups = iep.getIndexedTuples(); - - - int size = 0; - - while(processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - TupleExpr te = processedTups.next(); - System.out.println(te); - size++; - } - - Assert.assertTrue(!processedTups.hasNext()); - Assert.assertEquals(10, size); - - - } - - - - - - - - - - - - - - - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java deleted file mode 100644 index f8da36592..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java +++ /dev/null @@ -1,838 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.beust.jcommander.internal.Lists; -import com.google.common.collect.Sets; - -public class ThreshholdPlanSelectorTest { - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - private String q8 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?l ."// - + " ?l ?c ."// - + " ?c ?e . "// - + "}";// - - private String q9 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - - - - private String q15 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - private String q16 = ""// - + "SELECT ?f ?m ?d ?e ?l " // - + "{" // - + " ?d ?f . "// - + " ?d ?e . "// - + " ?m ?d ."// - + " ?l ?d ."// - + "}";// - - private String q17 = ""// - + "SELECT ?chicken ?dog ?cat " // - + "{" // - + " ?chicken ?dog . "// - + " ?cat ?chicken ."// - + "}";// - - private String q18 = ""// - + "SELECT ?dog ?chicken " // - + "{" // - + " ?chicken ?dog . "// - + "}";// - - private String q19 = ""// - + "SELECT ?cat ?chicken " // - + "{" // - + " ?cat ?chicken ."// - + "}";// - - - private String q20 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - - private String q21 = ""// - + "SELECT ?u ?s ?t " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . 
"// - + "}";// - - - - @Test - public void testSingleIndex() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q15, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q8, null); - ParsedQuery pq4 = parser.parseQuery(q9, null); - //ParsedQuery pq3 = parser.parseQuery(q12, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup2); - optTupNodes.add(extTup3); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - - IndexPlanValidator ipv = new IndexPlanValidator(false); - - Iterator validPlans = ipv.getValidTuples(plans); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - - TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .1, 1, 0, 0); - - NodeCollector nc = new NodeCollector(); - optimalTup.visit(nc); - - List qNodes = nc.getNodes(); - - - Assert.assertEquals(qNodes.size(), optTupNodes.size()); - for(QueryModelNode node: qNodes) { - Assert.assertTrue(optTupNodes.contains(node)); - } - - - } - - - - - - @Test - public void testSingleIndex2() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?c a ?l ."// - + " ?d ?f . "// - + " ?e ?c . "// - + " ?m ?d ."// - + " ?l ?e ."// - + " ?m ?e . "// - + "}";// - - String q2 = ""// - + "SELECT ?u ?s ?t " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - String q3 = ""// - + "SELECT ?e ?c ?l " // - + "{" // - + " ?c a ?l ."// - + " ?l ?e ."// - + " ?e ?c . "// - + "}";// - - String q4 = ""// - + "SELECT ?d ?f ?m " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . 
"// - + "}";// - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - ParsedQuery pq3 = parser.parseQuery(q3, null); - ParsedQuery pq4 = parser.parseQuery(q4, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - List spList = StatementPatternCollector.process(pq1.getTupleExpr()); - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup3); - optTupNodes.add(spList.get(6)); - optTupNodes.add(extTup2); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - - //System.out.println("Size is " + plans.size()); - // System.out.println("Plans are " + plans); - - IndexPlanValidator ipv = new IndexPlanValidator(true); - Iterator validPlans = ipv.getValidTuples(plans); - - //System.out.println("Valid plan size is " + validPlans.size()); - // System.out.println("Valid plans are " + validPlans); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - - TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .4, .7, .1, .2); - - NodeCollector nc = new NodeCollector(); - optimalTup.visit(nc); - - //System.out.println("Optimal plan is " + optimalTup); - - List qNodes = nc.getNodes(); - //System.out.println("Returned list is " + qNodes + " and comp list is " + optTupNodes); - - Assert.assertTrue(qNodes.equals(optTupNodes)); - - } - - - - - - - - - @Test - public void testTwoIndex() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?h ?i " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?d ?f ." // - + " ?f ?h ." // - + " ?f ?i ." // - + " ?i ?h ." // - + "}";// - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - - String q5 = ""// - + "SELECT ?m ?f ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - - String q6 = ""// - + "SELECT ?d ?f ?h " // - + "{" // - + " ?d ?f ." // - + " ?f ?h ." // - + "}";// - - String q7 = ""// - + "SELECT ?f ?i ?h " // - + "{" // - + " ?f ?i ." // - + " ?i ?h ." 
// - + "}";// - - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - ParsedQuery pq3 = parser.parseQuery(q3, null); - ParsedQuery pq4 = parser.parseQuery(q4, null); - ParsedQuery pq5 = parser.parseQuery(q5, null); - ParsedQuery pq6 = parser.parseQuery(q6, null); - ParsedQuery pq7 = parser.parseQuery(q7, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - SimpleExternalTupleSet extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - SimpleExternalTupleSet extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - - - - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup4); - optTupNodes.add(extTup6); - optTupNodes.add(extTup5); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - IndexPlanValidator ipv = new IndexPlanValidator(true); - Iterator validPlans = ipv.getValidTuples(plans); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .2, .6, .4, 0); - - NodeCollector nc = new NodeCollector(); - optimalTup.visit(nc); - - List qNodes = nc.getNodes(); - - Assert.assertTrue(qNodes.equals(optTupNodes)); - - } - - - - - @Test - public void largeQueryFourtyIndexTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q3 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + "}";// - - - - String q4 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g ?a ?b ?c" // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?c ?a . "// - + "}";// - - - String q5 = ""// - + "SELECT ?f ?m ?d ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?a a ?h ."// - + " ?h ?r ."// - + " ?r ?a . "// - + "}";// - - String q6 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?l ."// - + " ?l ?c ."// - + " ?c ?e . "// - + "}";// - - String q7 = ""// - + "SELECT ?n ?o ?p " // - + "{" // - + " ?n a ?o ."// - + " ?o ?p ."// - + " ?p ?n . 
"// - + "}";// - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - ParsedQuery pq7 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - pq6 = parser.parseQuery(q6, null); - pq7 = parser.parseQuery(q7, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - SimpleExternalTupleSet extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - SimpleExternalTupleSet extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - List list2 = new ArrayList(); - - list2.add(extTup4); - list2.add(extTup5); - list2.add(extTup6); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - IndexPlanValidator ipv = new IndexPlanValidator(false); - Iterator validPlans = ipv.getValidTuples(plans); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .4, .8, .1, .1); - - NodeCollector nc = new NodeCollector(); - optimalTup.visit(nc); - - - - } - - - - - - - - - @Test - public void twoIndexFilterTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " Filter(?f > \"5\")." // - + " Filter(?e > \"5\")." // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > \"5\") ."// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q4 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " Filter(?f > \"5\") ."// - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - - String q5 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " Filter(?e > \"5\") ."// - + " ?e a ?l ."// - + " ?l ?c ."// - + " ?c ?e . 
"// - + "}";// - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - List list2 = new ArrayList(); - - list2.add(extTup3); - list2.add(extTup4); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - - Iterator plans = (new TupleExecutionPlanGenerator()).getPlans(iep.getIndexedTuples()); - IndexPlanValidator ipv = new IndexPlanValidator(false); - Iterator validPlans = ipv.getValidTuples(plans); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - TupleExpr optimalTup = tps.getThreshholdQueryPlan(validPlans, .4, .8, .1, .1); - - NodeCollector nc = new NodeCollector(); - optimalTup.visit(nc); - - Assert.assertEquals(nc.getNodes().size(), list2.size()); - - for(QueryModelNode e: nc.getNodes()) { - Assert.assertTrue(list2.contains((ExternalTupleSet)e)); - } - - - - } - - - - - - - - - - @Test - public void testCost1() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?h ?i " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?d ?f ." // - + " ?f ?h ." // - + " ?f ?i ." // - + " ?i ?h ." // - + "}";// - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - double cost = tps.getCost(pq1.getTupleExpr(), .6, .4, 0); - Assert.assertEquals(.7,cost); - - } - - - - - @Test - public void testCost2() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - double cost = tps.getCost(pq1.getTupleExpr(), .4, .3, .3); - Assert.assertEquals(.58,cost, .000000001); - - } - - - - - - @Test - public void testCost3() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " Filter(?f > \"5\")." // - + " Filter(?e > \"6\")." // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . 
"// - + "}";// - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - - SimpleExternalTupleSet sep = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - List eList = Lists.newArrayList(); - - eList.add(sep); - ExternalProcessor ep = new ExternalProcessor(eList); - - TupleExpr te = pq1.getTupleExpr(); - te = ep.process(te); - - - - ThreshholdPlanSelector tps = new ThreshholdPlanSelector(pq1.getTupleExpr()); - double cost = tps.getCost(te, .4, .3, .3); - Assert.assertEquals(.575,cost); - - - } - - - - - - - - - - public static class NodeCollector extends QueryModelVisitorBase { - - List qNodes = Lists.newArrayList(); - - - public List getNodes() { - return qNodes; - } - - - - @Override - public void meetNode(QueryModelNode node) { - if(node instanceof StatementPattern || node instanceof ExternalTupleSet) { - qNodes.add(node); - } - super.meetNode(node); - - } - - - } - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java deleted file mode 100644 index ffb7b2db6..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java +++ /dev/null @@ -1,364 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import static org.junit.Assert.*; - -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; - -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.beust.jcommander.internal.Lists; -import com.beust.jcommander.internal.Sets; - -public class TupleExecutionPlanGeneratorTest { - - - - private String q1 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - private String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?t ?u ."// - + " ?s a ?t ."// - + " ?u ?s . "// - + "}";// - - - private String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?u ?s . "// - + " ?t ?u ."// - + " ?s a ?t ."// - + "}";// - - - private String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?u ?s . "// - + " ?t ?u ."// - + "}";// - - - private String q5 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s a ?t ."// - + "}";// - - - private String q6 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?u ?s . 
"// - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - private String q8 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?t ?u ."// - + " ?s a ?t ."// - + "}";// - - - - - - - private String q9 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - private String q10 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?t ?u ."// - + " ?s a ?t ."// - + " ?u ?s . "// - + "}";// - - - private String q11 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?u ?s . "// - + " ?t ?u ."// - + " ?s a ?t ."// - + "}";// - - - private String q12 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?s a ?t ."// - + " ?u ?s . "// - + " ?t ?u ."// - + "}";// - - - private String q13 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s a ?t ."// - + "}";// - - - private String q14 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 2). "// - + " Filter(?s > 1). "// - + " ?u ?s . "// - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - private String q15 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 1). "// - + " Filter(?t > 2). "// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - - @Test - public void testTwoNodeOrder() { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - - try { - pq1 = parser.parseQuery(q7, null); - pq2 = parser.parseQuery(q8, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - Set tupSet = Sets.newHashSet(); - tupSet.add(pq1.getTupleExpr()); - - TupleExecutionPlanGenerator tep = new TupleExecutionPlanGenerator(); - Iterator processedTups = tep.getPlans(tupSet.iterator()); - - List processedTupList = Lists.newArrayList(); - - int size = 0; - - while(processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - processedTupList.add(processedTups.next()); - size++; - } - - Assert.assertEquals(2, size); - - Assert.assertTrue(processedTupList.get(0).equals(pq2.getTupleExpr())); - Assert.assertTrue(processedTupList.get(1).equals(pq1.getTupleExpr())); - - } - - - - - - @Test - public void testThreeNodeOrder() { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - pq6 = parser.parseQuery(q6, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - Set tupSet = Sets.newHashSet(); - tupSet.add(pq1.getTupleExpr()); - - TupleExecutionPlanGenerator tep = new TupleExecutionPlanGenerator(); - Iterator processedTups= tep.getPlans(tupSet.iterator()); - - List processedTupList = Lists.newArrayList(); - - int size = 0; - - while(processedTups.hasNext()) { - Assert.assertTrue(processedTups.hasNext()); - processedTupList.add(processedTups.next()); - size++; - } - - 
Assert.assertTrue(!processedTups.hasNext()); - Assert.assertEquals(6, size); - - Assert.assertTrue(processedTupList.get(5).equals(pq1.getTupleExpr())); - Assert.assertTrue(processedTupList.get(0).equals(pq2.getTupleExpr())); - Assert.assertTrue(processedTupList.get(2).equals(pq3.getTupleExpr())); - Assert.assertTrue(processedTupList.get(4).equals(pq4.getTupleExpr())); - Assert.assertTrue(processedTupList.get(1).equals(pq5.getTupleExpr())); - Assert.assertTrue(processedTupList.get(3).equals(pq6.getTupleExpr())); - - } - - - - @Test - public void testThreeNodeOrderFilter() { - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - ParsedQuery pq7 = null; - - try { - pq1 = parser.parseQuery(q9, null); - pq2 = parser.parseQuery(q10, null); - pq3 = parser.parseQuery(q11, null); - pq4 = parser.parseQuery(q12, null); - pq5 = parser.parseQuery(q13, null); - pq6 = parser.parseQuery(q14, null); - pq7 = parser.parseQuery(q15, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - System.out.println(pq1.getTupleExpr()); - - Set tupSet = Sets.newHashSet(); - tupSet.add(pq7.getTupleExpr()); - - TupleExecutionPlanGenerator tep = new TupleExecutionPlanGenerator(); - Iterator processedTups= tep.getPlans(tupSet.iterator()); - - List processedTupList = Lists.newArrayList(); - - int size = 0; - - while(processedTups.hasNext()) { - - Assert.assertTrue(processedTups.hasNext()); - TupleExpr te = processedTups.next(); - processedTupList.add(te); - System.out.println("Processed tups are " + te); - size++; - } - - Assert.assertTrue(!processedTups.hasNext()); - Assert.assertEquals(6, size); - - Assert.assertTrue(processedTupList.get(5).equals(pq1.getTupleExpr())); - Assert.assertTrue(processedTupList.get(0).equals(pq2.getTupleExpr())); - Assert.assertTrue(processedTupList.get(2).equals(pq3.getTupleExpr())); - Assert.assertTrue(processedTupList.get(4).equals(pq4.getTupleExpr())); - Assert.assertTrue(processedTupList.get(1).equals(pq5.getTupleExpr())); - Assert.assertTrue(processedTupList.get(3).equals(pq6.getTupleExpr())); - - } - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java deleted file mode 100644 index 96466cbb7..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java +++ /dev/null @@ -1,141 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.List; - -import junit.framework.Assert; - -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -public class TupleReArrangerTest { - - @Test - public void tupleReArrangeTest1() throws MalformedQueryException { - - String queryString = ""// - + "SELECT ?a ?b ?c ?d ?e" // - + "{" // - + "{ ?a a ?b . ?a ?c }"// - + " UNION { ?a ?d . ?a ?e }"// - + "}";// - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(queryString, null); - List tuples = TupleReArranger.getTupleReOrderings(pq.getTupleExpr()); - - Assert.assertEquals(4, tuples.size()); - - } - - - - @Test - public void tupleReArrangeTest2() throws MalformedQueryException { - - String queryString = ""// - + "SELECT ?a ?b ?c ?d ?e ?x ?y" // - + "{" // - + " ?e ?x ." // - + " ?e ?y . "// - + "{ ?a a ?b . ?a ?c }"// - + " UNION { ?a ?d . ?a ?e }"// - + "}";// - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(queryString, null); - List tuples = TupleReArranger.getTupleReOrderings(pq.getTupleExpr()); - - - Assert.assertEquals(24, tuples.size()); - - } - - - - - - @Test - public void tupleReArrangeTest3() throws MalformedQueryException { - - String queryString = ""// - + "SELECT ?a ?b ?c ?d ?e ?x ?y" // - + "{" // - + " Filter(?c = )" // - + " Filter(?x = ) "// - + " ?e ?x ." // - + " ?e ?y . "// - + "{ ?a a ?b . ?a ?c }"// - + " UNION { ?a ?d . ?a ?e }"// - + "}";// - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(queryString, null); - List tuples = TupleReArranger.getTupleReOrderings(pq.getTupleExpr()); - - Assert.assertEquals(24, tuples.size()); - - } - - - - - - - - @Test - public void tupleReArrangeTest4() throws MalformedQueryException { - - String queryString = ""// - + "SELECT ?a ?b ?c ?d ?e ?x ?y" // - + "{" // - + " Filter(?c = )" // - + " Filter(?x = ) "// - + " Filter(?d = ) " // - + " ?e ?x ." // - + " ?e ?y . "// - + "{ ?a a ?b . ?a ?c }"// - + " UNION { ?a ?d . ?a ?e }"// - + "}";// - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(queryString, null); - TupleExpr te = pq.getTupleExpr(); - (new FilterOptimizer()).optimize(te, null, null); - System.out.println(te); - List tuples = TupleReArranger.getTupleReOrderings(te); - System.out.println(tuples); - - Assert.assertEquals(24, tuples.size()); - - } - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java deleted file mode 100644 index 38f781375..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java +++ /dev/null @@ -1,620 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import junit.framework.Assert; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import com.google.common.collect.Lists; - - -public class ValidIndexCombinationGeneratorTest { - - - - - - - @Test - public void singleIndex() { - String q1 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?f ?m ." // - + " ?m ?d ." // - + " ?f ?m ." // - + " ?m ?d ." // - + "}";// - - - - - - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - - - SimpleExternalTupleSet extTup1 = null; - - - - - - - try { - pq1 = parser.parseQuery(q1, null); - - - - extTup1 = new SimpleExternalTupleSet((Projection) pq1.getTupleExpr()); - - - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List indexList = Lists.newArrayList(); - indexList.add(extTup1); - - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> combos = vic.getValidIndexCombos(indexList); - int size = 0; - while(combos.hasNext()) { - combos.hasNext(); - size++; - combos.next(); - combos.hasNext(); - } - - Assert.assertTrue(!combos.hasNext()); - Assert.assertEquals(1,size); - - - } - - - - - - - @Test - public void medQueryEightOverlapIndex() { - String q1 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?f ?m ." // - + " ?m ?d ." // - + " ?f ?m ." // - + " ?m ?d ." // - + "}";// - - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q5 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q6 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - - String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + " ?t ?u ." // - + "}";// - - - - String q8 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?s ?t ." 
// - + "}";// - - - String q9 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + "}";// - - - - - - - - - - SPARQLParser parser = new SPARQLParser(); - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - ParsedQuery pq6 = null; - ParsedQuery pq7 = null; - ParsedQuery pq8 = null; - ParsedQuery pq9 = null; - - SimpleExternalTupleSet extTup1 = null; - SimpleExternalTupleSet extTup2 = null; - SimpleExternalTupleSet extTup3 = null; - SimpleExternalTupleSet extTup4 = null; - SimpleExternalTupleSet extTup5 = null; - SimpleExternalTupleSet extTup6 = null; - SimpleExternalTupleSet extTup7 = null; - SimpleExternalTupleSet extTup8 = null; - - - - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - pq6 = parser.parseQuery(q6, null); - pq7 = parser.parseQuery(q7, null); - pq8 = parser.parseQuery(q8, null); - pq9 = parser.parseQuery(q9, null); - - - extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - extTup7 = new SimpleExternalTupleSet((Projection) pq8.getTupleExpr()); - extTup8 = new SimpleExternalTupleSet((Projection) pq9.getTupleExpr()); - - - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List indexList = Lists.newArrayList(); - indexList.add(extTup1); - indexList.add(extTup2); - indexList.add(extTup3); - indexList.add(extTup4); - indexList.add(extTup5); - indexList.add(extTup6); - indexList.add(extTup7); - indexList.add(extTup8); - - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> combos = vic.getValidIndexCombos(indexList); - int size = 0; - while(combos.hasNext()) { - combos.hasNext(); - size++; - combos.next(); - combos.hasNext(); - } - - Assert.assertTrue(!combos.hasNext()); - Assert.assertEquals(21,size); - - - } - - - - - - @Test - public void largeQuerySixteenIndexTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q3 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . 
"// - + "}";// - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - - - Assert.assertEquals(16, indexSet.size()); - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> eSet = vic.getValidIndexCombos(Lists.newArrayList(indexSet)); - - int size = 0; - while(eSet.hasNext()) { - size++; - Assert.assertTrue(eSet.hasNext()); - eSet.next(); - } - - - Assert.assertTrue(!eSet.hasNext()); - Assert.assertEquals(75, size); - - } - - - - - - - @Test - public void largeQueryFourtyIndexTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q3 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + "}";// - - - - String q4 = ""// - + "SELECT ?s ?t ?u ?d ?f ?g ?a ?b ?c" // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + " ?d a ?f ."// - + " ?f ?g ."// - + " ?g ?d . "// - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?c ?a . "// - + "}";// - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - Assert.assertEquals(40, indexSet.size()); - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> eSet = vic.getValidIndexCombos(Lists.newArrayList(indexSet)); - - int size = 0; - while(eSet.hasNext()) { - size++; - Assert.assertTrue(eSet.hasNext()); - eSet.next(); - } - - Assert.assertTrue(!eSet.hasNext()); - Assert.assertEquals(123, size); - } - - - - - - @Test - public void overlappingFilterIndex() { - - - String q5 = ""// - + "SELECT ?s ?t " // - + "{" // - + " ?s a \"Person\" ." 
// - + " ?t a \"Student\" ."// - + "}";// - - - String q4 = ""// - + "SELECT ?s ?t " // - + "{" // - + " ?s a ?t ."// - + " ?s ?t . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t " // - + "{" // - + " Filter(?s > 5). "// - + " ?s a ?t ."// - + " ?s ?t . "// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t " // - + "{" // - + " Filter(?s > 5). "// - + " ?s a \"Person\" ." // - + " ?t a \"Student\" ."// - + "}";// - - - - String q1 = ""// - + "SELECT ?s ?t " // - + "{" // - + " Filter(?s > 5). "// - + " ?s a ?t ."// - + " ?s ?t . "// - + " ?s a \"Person\" ." // - + " ?t a \"Student\" ."// - + "}";// - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - list.add(extTup4); - - - IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq1.getTupleExpr(), list); - List indexSet = iep.getNormalizedIndices(); - - - - Assert.assertEquals(4, indexSet.size()); - - ValidIndexCombinationGenerator vic = new ValidIndexCombinationGenerator(pq1.getTupleExpr()); - Iterator> eSet = vic.getValidIndexCombos(Lists.newArrayList(indexSet)); - - int size = 0; - while(eSet.hasNext()) { - size++; - Assert.assertTrue(eSet.hasNext()); - List eList = eSet.next(); - - } - - - Assert.assertTrue(!eSet.hasNext()); - Assert.assertEquals(7, size); - - } - - - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java deleted file mode 100644 index 181d4fb1d..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java +++ /dev/null @@ -1,329 +0,0 @@ -package mvm.rya.indexing.IndexPlanValidator; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.ExternalProcessorTest.ExternalTupleVstor; - -import org.junit.Assert; -import org.junit.Test; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Sets; - -public class VarConstantIndexListPrunerTest { - - - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - private String q8 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + "}";// - - - - - private String q11 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + "}";// - - - private String q12 = ""// - + "SELECT ?b ?p ?dog ?cat " // - + "{" // - + " ?b a ?p ."// - + " ?dog a ?cat. "// - + "}";// - - - - private String q13 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit ?dick ?jane ?betty " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + " ?dick ?jane . "// - + " ?jane ?betty . "// - + "}";// - - private String q14 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + " ?d ?a . "// - + "}";// - - - - private String q15 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 1)."// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - private String q16 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 2)."// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - private String q17 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?t > 1)."// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . 
"// - + "}";// - - - - @Test - public void testTwoIndexLargeQuery() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q11, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q12, null); - ParsedQuery pq4 = parser.parseQuery(q13, null); - ParsedQuery pq5 = parser.parseQuery(q8, null); - ParsedQuery pq6 = parser.parseQuery(q14, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection)pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection)pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection)pq5.getTupleExpr()); - SimpleExternalTupleSet extTup5 = new SimpleExternalTupleSet((Projection)pq6.getTupleExpr()); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - list.add(extTup4); - list.add(extTup5); - - VarConstantIndexListPruner vci = new VarConstantIndexListPruner(pq1.getTupleExpr()); - Set processedIndexSet = vci.getRelevantIndices(list); - - System.out.println("Relevant indexes are: "); - for(ExternalTupleSet e: processedIndexSet) { - System.out.println(e); - } - - Set indexSet = Sets.newHashSet(); - indexSet.add(extTup1); - indexSet.add(extTup2); - indexSet.add(extTup4); - - Assert.assertTrue(Sets.intersection(indexSet, processedIndexSet).equals(processedIndexSet)); - - - - } - - - - - - @Test - public void testTwoIndexFilter1() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q15, null); - ParsedQuery pq2 = parser.parseQuery(q16, null); - ParsedQuery pq3 = parser.parseQuery(q17, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection)pq3.getTupleExpr()); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - - VarConstantIndexListPruner vci = new VarConstantIndexListPruner(pq1.getTupleExpr()); - Set processedIndexSet = vci.getRelevantIndices(list); - - System.out.println("Relevant indexes are: "); - for(ExternalTupleSet e: processedIndexSet) { - System.out.println(e); - } - - Set indexSet = Sets.newHashSet(); - indexSet.add(extTup2); - - - Assert.assertTrue(Sets.intersection(indexSet, processedIndexSet).equals(processedIndexSet)); - - - - } - - - - @Test - public void testTwoIndexFilter2() throws Exception { - - - String q18 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 1 && ?t > 8)." // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q19 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > 1)." // - + " Filter(?t > 8)." // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . 
"// - + "}";// - - - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q18, null); - ParsedQuery pq2 = parser.parseQuery(q19, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - - List list = new ArrayList(); - list.add(extTup1); - - VarConstantIndexListPruner vci = new VarConstantIndexListPruner(pq1.getTupleExpr()); - Set processedIndexSet = vci.getRelevantIndices(list); - - Assert.assertTrue(processedIndexSet.isEmpty()); - - - - } - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java deleted file mode 100644 index 88e28419f..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java +++ /dev/null @@ -1,106 +0,0 @@ -package mvm.rya.indexing.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import junit.framework.Assert; -import mvm.rya.indexing.accumulo.StatementSerializer; - -import org.junit.Test; -import org.openrdf.model.Statement; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -public class StatementSerializerTest { - - @Test - public void testSimpleStatementObjectUri() throws Exception { - ValueFactory vf = new ValueFactoryImpl(); - Statement s; - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createURI("foo:object")); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new ContextStatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createURI("foo:object"), - vf.createURI("foo:context")); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - } - - @Test - public void testSimpleObjectLiteral() throws Exception { - ValueFactory vf = new ValueFactoryImpl(); - Statement s; - String str; - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createURI("foo:object")); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - str = "Alice Palace"; - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str)); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, "en")); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, vf.createURI("xsd:string"))); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - } - - @Test - public void testObjectLiteralWithDataTypeGarbage() throws Exception { - // test with some garbage in the literal that may throw off the parser - ValueFactory vf = new ValueFactoryImpl(); - Statement s; - String str; - - str = "Alice ^^\""; - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str)); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, "en")); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, vf.createURI("xsd:string"))); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - } - - @Test - public void testObjectLiteralWithAtSignGarbage() throws Exception { - // test with some garbage in the literal that may throw off the parser - ValueFactory vf = new ValueFactoryImpl(); - Statement s; - String str; - - str = "Alice @en"; - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str)); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, "en")); - 
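// Editorial sketch, not part of the original patch: the property every
// assertion in this test class checks is that StatementSerializer is a
// lossless round trip, even for literals containing "^^" or "@" sequences
// that mimic RDF typed- or language-tagged-literal syntax. Variable names
// below are illustrative; the APIs are the ones this file already imports.
Statement roundTrip = new StatementImpl(
        vf.createURI("foo:s"),
        vf.createURI("foo:p"),
        vf.createLiteral("looks typed ^^\"@en but is plain"));
String wire = StatementSerializer.writeStatement(roundTrip);             // encode to a String
Assert.assertEquals(roundTrip, StatementSerializer.readStatement(wire)); // decode must invert encode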
Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - - s = new StatementImpl(vf.createURI("foo:subject"), vf.createURI("foo:predicate"), vf.createLiteral(str, vf.createURI("xsd:string"))); - Assert.assertEquals(s, StatementSerializer.readStatement(StatementSerializer.writeStatement(s))); - } - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java deleted file mode 100644 index e7e06d9ae..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java +++ /dev/null @@ -1,2125 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import junit.framework.Assert; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.RyaTableMutationsFactory; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Mutation; -import org.apache.hadoop.conf.Configuration; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.XMLSchema; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; - -public class AccumuloDocIndexerTest { - - - private Connector accCon; - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - ValueFactory vf = new ValueFactoryImpl(); - - private static final String tableName = "EntityCentric_entity"; - - - @Before - public void init() throws Exception { - - accCon = new MockInstance("instance").getConnector("root", "".getBytes()); - - 
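// Editorial sketch, not part of the original patch: the init() steps below
// follow the usual mock-Accumulo reset idiom -- drop the entity-centric
// table if it already exists, recreate it, then point the configuration at
// the same MockInstance. Factored into a helper (the name is ours, the
// calls are exactly those used below), it would read:
//
//     private static void resetTable(Connector conn, String table) throws Exception {
//         if (conn.tableOperations().exists(table)) {
//             conn.tableOperations().delete(table);
//         }
//         conn.tableOperations().create(table);
//     }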
if(accCon.tableOperations().exists(tableName)) { - accCon.tableOperations().delete(tableName); - } - - accCon.tableOperations().create(tableName); - - - Configuration config = new Configuration(); - config.set(ConfigUtils.CLOUDBASE_AUTHS, "U"); - config.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance"); - config.set(ConfigUtils.CLOUDBASE_USER, "root"); - config.set(ConfigUtils.CLOUDBASE_PASSWORD, ""); - - conf = new AccumuloRdfConfiguration(config); - conf.set(ConfigUtils.USE_MOCK_INSTANCE, "true"); - conf.setAdditionalIndexers(EntityCentricIndex.class); - - } - - - - - @Test - public void testNoContext1() throws Exception { - - BatchWriter bw = null; - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - conf.setTablePrefix("EntityCentric_"); - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - for (int i = 0; i < 20; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - RyaStatement rs4 = null; - - if(i == 5 || i == 15) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - rs4 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.STRING,Integer.toString(i))); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = null; - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs4 != null) { - serialize4 = rtm.serialize(rs4); - } - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - if (serialize4 != null) { - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X 5 ."// - + "}"; - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X \"15\" ."// - + "}"; - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te1 = pq1.getTupleExpr(); - TupleExpr te2 = pq2.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - List spList2 = StatementPatternCollector.process(te2); - - StarQuery sq1 = new StarQuery(spList1); - StarQuery sq2 = new StarQuery(spList2); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - List bsList = Lists.newArrayList(); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("******************Test 1************************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(1, results); - - System.out.println("************************************************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - 
results++; - } - - Assert.assertEquals(1, results); - - - - adi.close(); - - - - - - - } - - - - - - - @Test - public void testNoContext2() throws Exception { - - BatchWriter bw = null; - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - conf.setTablePrefix("EntityCentric_"); - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i )); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); -// QueryBindingSet b1 = (new QueryBindingSet()); -// b1.addBinding("X", vf.createURI("uri:5")); -// QueryBindingSet b2 = (new QueryBindingSet()); -// b2.addBinding("X", vf.createURI("uri:15")); -// QueryBindingSet b3 = (new QueryBindingSet()); -// b3.addBinding("X", vf.createURI("uri:25")); -// bsList.add(b1); -// bsList.add(b2); -// bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 2***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(5, results); - - - adi.close(); - - - - } - - - - - - - - - - - - - - - - @Test - public void testNoContextCommonVarBs() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - } - - Map> serialize1 = 
rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("X", vf.createURI("uri:5")); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("X", vf.createURI("uri:15")); - QueryBindingSet b3 = (new QueryBindingSet()); - b3.addBinding("X", vf.createURI("uri:25")); - bsList.add(b1); - bsList.add(b2); - bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 3***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(3, results); - - - adi.close(); - - - - } - - - - - - @Test - public void testNoContextUnCommonVarBs() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - 
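// Editorial sketch, not part of the original patch: StarQuery.isValidStarQuery,
// asserted just above, requires every statement pattern in the group to share
// one common subject variable (?X in this test) and, when contexts appear, a
// common context. A minimal well-formed star group, with a query string of our
// own whose predicates mirror the test data written above:
List<StatementPattern> star = StatementPatternCollector.process(
        new SPARQLParser().parseQuery(
                "SELECT ?X ?Y1 ?Y2 { ?X <uri:cf1> ?Y1 . ?X <uri:cf2> ?Y2 . }",
                null).getTupleExpr());
Assert.assertTrue(StarQuery.isValidStarQuery(star)); // common subject ?X across all patterns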
AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(5))); - Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25))); - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("Y3", v1); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("Y3", v2); - bsList.add(b1); - bsList.add(b2); - - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 4***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(2, results); - - - adi.close(); - - - - } - - - - @Test - public void testNoContextCommonVarBs2() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i )); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("X", vf.createURI("uri:5")); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("X", vf.createURI("uri:15")); - QueryBindingSet b3 = (new QueryBindingSet()); - b3.addBinding("X", vf.createURI("uri:25")); - bsList.add(b1); - bsList.add(b2); - bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 5***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(3, results); - - - adi.close(); - - - - } - - - - - - @Test - public void testNoContextUnCommonVarBs2() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - - RyaTableMutationsFactory rtm = new 
RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i )); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2")); - RyaStatement rs3 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i))); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("X", vf.createURI("uri:5")); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("X", vf.createURI("uri:15")); - QueryBindingSet b3 = (new QueryBindingSet()); - b3.addBinding("X", vf.createURI("uri:25")); - bsList.add(b1); - bsList.add(b2); - bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 6***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(3, results); - - - adi.close(); - - - - } - - - - - - - - @Test - public void testContext2() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i ), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i ), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new 
RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); -// QueryBindingSet b1 = (new QueryBindingSet()); -// b1.addBinding("X", vf.createURI("uri:5")); -// QueryBindingSet b2 = (new QueryBindingSet()); -// b2.addBinding("X", vf.createURI("uri:15")); -// QueryBindingSet b3 = (new QueryBindingSet()); -// b3.addBinding("X", vf.createURI("uri:25")); -// bsList.add(b1); -// bsList.add(b2); -// bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 7***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(5, results); - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 7***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(10, results); - - - adi.close(); - - - - } - - - - - - - @Test - public void testContextUnCommonVarBs2() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new 
RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i ), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i ), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?Y1 ?X ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("X", vf.createURI("uri:5")); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("X", vf.createURI("uri:15")); - QueryBindingSet b3 = (new QueryBindingSet()); - b3.addBinding("X", vf.createURI("uri:25")); - bsList.add(b1); - bsList.add(b2); - bsList.add(b3); - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 8***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(3, results); - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - 
Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 8***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(6, results); - - - adi.close(); - - - - - } - - - @Test - public void testContext1() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize6 != null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - -// Value v1 = RyaToRdfConversions.convertValue(new 
RyaType(XMLSchema.INTEGER,Integer.toString(5))); -// Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25))); - - List bsList = Lists.newArrayList(); -// QueryBindingSet b1 = (new QueryBindingSet()); -// b1.addBinding("Y3", v1); -// QueryBindingSet b2 = (new QueryBindingSet()); -// b2.addBinding("Y3", v2); -// bsList.add(b1); -// bsList.add(b2); -// - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 10***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(10, results); - - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 10***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(5, results); - - - adi.close(); - - - - - } - - - - - - - - - @Test - public void testContextUnCommonVarBs1() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize6 != 
null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(5))); - Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25))); - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("Y3", v1); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("Y3", v2); - bsList.add(b1); - bsList.add(b2); - - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 11***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(4, results); - - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 11***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(2, results); - - - adi.close(); - - - - - } - - - - - - - - - - - @Test - public void testContextCommonVarBs1() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = 
rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize6 != null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(5))); - Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25))); - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = (new QueryBindingSet()); - b1.addBinding("X", vf.createURI("uri:5")); - QueryBindingSet b2 = (new QueryBindingSet()); - b2.addBinding("X", vf.createURI("uri:15")); - QueryBindingSet b3 = (new QueryBindingSet()); - b3.addBinding("X", vf.createURI("uri:25")); - bsList.add(b1); - bsList.add(b2); - bsList.add(b3); - - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 12***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(6, results); - - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 12***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(3, results); - - - adi.close(); - - - - - } - - - - - @Test - public void testContextCommonAndUnCommonVarBs1() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe")); - 
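// Editorial sketch, not part of the original patch: further down, this test
// constrains both the common subject ?X and the non-common object ?Y3 inside
// a single partial binding, so every returned solution must extend one of
// the supplied rows. Mirroring the bsList construction below (the values are
// the test's own):
//
//     QueryBindingSet constrained = new QueryBindingSet();
//     constrained.addBinding("X", vf.createURI("uri:5"));
//     constrained.addBinding("Y3", RyaToRdfConversions.convertValue(
//             new RyaType(XMLSchema.INTEGER, Integer.toString(105))));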
RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - RyaStatement rs7 = null; - RyaStatement rs8 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - rs7 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(100+i)), new RyaURI("uri:joe")); - rs8 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(100+i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - Map> serialize7 = null; - Map> serialize8 = null; - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - if(rs7 != null) { - serialize7 = rtm.serialize(rs7); - } - - if(rs8 != null) { - serialize8 = rtm.serialize(rs8); - } - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize6 != null) { - Collection m6 = EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - if (serialize7 != null) { - Collection m7 = EntityCentricIndex.createMutations(rs7); - for (Mutation m : m7) { - bw.addMutation(m); - } - } - if (serialize8 != null) { - Collection m8 = EntityCentricIndex.createMutations(rs8); - for (Mutation m : m8) { - bw.addMutation(m); - } - } - - - } - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}"; - - - - String q2 = "" // - + "SELECT ?X ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + " } "// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - - Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(105))); - Value v2 = RyaToRdfConversions.convertValue(new 
RyaType(XMLSchema.INTEGER,Integer.toString(125))); - - List bsList = Lists.newArrayList(); - QueryBindingSet b1 = new QueryBindingSet(); - b1.addBinding("X", vf.createURI("uri:5")); - b1.addBinding("Y3", v1); - QueryBindingSet b2 = new QueryBindingSet(); - b2.addBinding("X", vf.createURI("uri:25")); - b2.addBinding("Y3", v2); - bsList.add(b1); - bsList.add(b2); - - - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 13***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(4, results); - - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 13***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(2, results); - - - adi.close(); - - - - - } - - - - - - - - - @Test - public void testContextConstantCommonVar() throws Exception { - - BatchWriter bw = null; - conf.setTablePrefix("EntityCentric_"); - RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf)); - - - bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30); - - - - for (int i = 0; i < 30; i++) { - - - RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe")); - RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe")); - RyaStatement rs3 = null; - - RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank")); - RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i ), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank")); - RyaStatement rs6 = null; - - - if(i == 5 || i == 10 || i == 15 || i == 20 || i == 25) { - rs3 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:joe")); - rs6 = new RyaStatement(new RyaURI("uri:" +i ), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER,Integer.toString(i)), new RyaURI("uri:hank")); - } - - Map> serialize1 = rtm.serialize(rs1); - Map> serialize2 = rtm.serialize(rs2); - Map> serialize3 = null; - Map> serialize4 = rtm.serialize(rs4); - Map> serialize5 = rtm.serialize(rs5); - Map> serialize6 = null; - - - if(rs3 != null) { - serialize3 = rtm.serialize(rs3); - } - - if(rs6 != null) { - serialize6 = rtm.serialize(rs6); - } - - - - Collection m1 = EntityCentricIndex.createMutations(rs1); - for (Mutation m : m1) { - bw.addMutation(m); - } - Collection m2 = EntityCentricIndex.createMutations(rs2); - for (Mutation m : m2) { - bw.addMutation(m); - } - if (serialize3 != null) { - Collection m3 = EntityCentricIndex.createMutations(rs3); - for (Mutation m : m3) { - bw.addMutation(m); - } - } - - Collection m4 = EntityCentricIndex.createMutations(rs4); - for (Mutation m : m4) { - bw.addMutation(m); - } - Collection m5 = EntityCentricIndex.createMutations(rs5); - for (Mutation m : m5) { - bw.addMutation(m); - } - if (serialize6 != null) { - Collection m6 = 
EntityCentricIndex.createMutations(rs6); - for (Mutation m : m6) { - bw.addMutation(m); - } - } - - - - - - } - - String q1 = "" // - + "SELECT ?Y1 ?Y2 ?Y3 " // - + "{"// - + " ?Y1 ."// - + " ?Y2 ."// - + " ?Y3 ."// - + "}"; - - - - String q2 = "" // - + "SELECT ?Y1 ?Y2 ?Y3 " // - + "{"// - + " GRAPH { " // - + " ?Y1 ."// - + " ?Y2 ."// - + " ?Y3 ."// - + " } "// - + "}"; - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - - TupleExpr te1 = pq1.getTupleExpr(); - - List spList1 = StatementPatternCollector.process(te1); - - String rowString = spList1.get(0).getSubjectVar().getValue().stringValue(); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - StarQuery sq1 = new StarQuery(spList1); - - AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf); - -// Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(5))); -// Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25))); - - List bsList = Lists.newArrayList(); -// QueryBindingSet b1 = (new QueryBindingSet()); -// b1.addBinding("X", vf.createURI("uri:5")); -// QueryBindingSet b2 = (new QueryBindingSet()); -// b2.addBinding("X", vf.createURI("uri:15")); -// QueryBindingSet b3 = (new QueryBindingSet()); -// b3.addBinding("X", vf.createURI("uri:25")); -// bsList.add(b1); -// bsList.add(b2); -// bsList.add(b3); - - -// BatchScanner bs = accCon.createBatchScanner(tablename + "doc_partitioned_index", new Authorizations("U"), 15); -// bs.setRanges(Collections.singleton(new Range(rowString))); -// Iterator> bsIt = bs.iterator(); -// while(bsIt.hasNext()) { -// String otherRowString = bsIt.next().getKey().getRow().toString(); -// if(rowString.equals(otherRowString)) { -// System.out.println(otherRowString); -// } -// -// } - - - - CloseableIteration sol1 = adi.queryDocIndex(sq1, bsList); - - System.out.println("**********************TEST 14***********************"); - int results = 0; - while(sol1.hasNext()) { - System.out.println(sol1.next()); - results++; - } - Assert.assertEquals(2, results); - - - - - ParsedQuery pq2 = parser.parseQuery(q2, null); - - TupleExpr te2 = pq2.getTupleExpr(); - - List spList2 = StatementPatternCollector.process(te2); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList2)); - - StarQuery sq2 = new StarQuery(spList2); - - - CloseableIteration sol2 = adi.queryDocIndex(sq2, bsList); - - System.out.println("**********************TEST 14***********************"); - results = 0; - while(sol2.hasNext()) { - System.out.println(sol2.next()); - results++; - } - Assert.assertEquals(1, results); - - - adi.close(); - - - - - } - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java deleted file mode 100644 index b6d5548ec..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java +++ /dev/null @@ -1,1357 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.entity.EntityOptimizer; -import mvm.rya.indexing.accumulo.entity.EntityTupleSet; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.repository.RepositoryException; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public class EntityOptimizerTest { - - private static final String DELIM = "\u0000"; - private final byte[] EMPTY_BYTE = new byte[0]; - private final Value EMPTY_VAL = new Value(EMPTY_BYTE); - - private String q1 = ""// - + "SELECT ?h " // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + "}";// - - - private String q2 = ""// - + "SELECT ?h ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " ?m . " // - + " ?m . " // - + "}";// - - private String Q2 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h . "// - + " ?h ."// - + " ?h ."// - + " ?m . " // - + " ?m . " // - + "}";// - - private String q3 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " {?m } OPTIONAL {?m }. " // - + " {?m . ?m .} UNION {?m }. " // - + " ?l ."// - + " ?l ."// - + " ?l ."// - + "}";// - - private String Q4 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?m . " // - + " ?m . " // - + " ?h ."// - + " ?h . "// - + "}";// - - private String q5 = ""// - + "SELECT ?h ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h ?m . "// - + " ?m . " // - + " ?m . 
" // - + "}";// - - - - private String q6 = ""// - + "SELECT ?h ?i ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " {?m } OPTIONAL {?m }. " // - + " {?m . ?m ?i. ?i .} " // - + " UNION {?m . ?l ?m . ?l . }. " // - + "}";// - - - private String q7 = ""// - + "SELECT ?h ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h ?m . "// - + " ?m . " // - + " ?m . " // - + "}";// - - - private String q8 = ""// - + "SELECT ?h ?m" // - + "{" // - + " Filter(?h = \"Diego\") " // - + " Filter(?m = \"Rosie\") " // - + " ?h ."// - + " ?h ."// - + " ?h ?m . "// - + " ?m . " // - + " ?m . " // - + "}";// - - - - - private String q9 = ""// - + "SELECT ?h ?i ?l ?m" // - + "{" // - + " Filter(?h = \"Diego\") " // - + " Filter(?m = \"Rosie\") " // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " {?m } OPTIONAL {?m }. " // - + " { Filter(?i = \"Bobo\"). ?m . ?m ?i. ?i .} " // - + " UNION {?m . ?l ?m . ?l . }. " // - + "}";// - - - - private Connector accCon; - AccumuloRdfConfiguration conf; - BatchWriterConfig config; - RdfEvalStatsDAO res; - - - - @Before - public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException { - - - - accCon = new MockInstance("instance").getConnector("root", "".getBytes()); - - config = new BatchWriterConfig(); - config.setMaxMemory(1000); - config.setMaxLatency(1000, TimeUnit.SECONDS); - config.setMaxWriteThreads(10); - - if (accCon.tableOperations().exists("rya_prospects")) { - try { - accCon.tableOperations().delete("rya_prospects"); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - } - if (accCon.tableOperations().exists("rya_selectivity")) { - try { - accCon.tableOperations().delete("rya_selectivity"); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - } - - accCon.tableOperations().create("rya_prospects"); - accCon.tableOperations().create("rya_selectivity"); - - Configuration con = new Configuration(); - con.set(ConfigUtils.CLOUDBASE_AUTHS, "U"); - con.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance"); - con.set(ConfigUtils.CLOUDBASE_USER, "root"); - con.set(ConfigUtils.CLOUDBASE_PASSWORD, ""); - conf = new AccumuloRdfConfiguration(con); - TablePrefixLayoutStrategy tps = new TablePrefixLayoutStrategy("rya_"); - conf.setTableLayoutStrategy(tps); - conf.set(ConfigUtils.USE_MOCK_INSTANCE, "true"); - - - res = new ProspectorServiceEvalStatsDAO(accCon, conf); - - } - - - @Test - public void testOptimizeQ1SamePriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject"); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new 
Value("1".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("1".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - - - TupleExpr te = getTupleExpr(q1); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - Assert.assertEquals(1, ccv.getCcNodes().size()); - - System.out.println(te); - - } - - - - - - @Test - public void testOptimizeQ2SamePriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("1".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - -// Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); -// scan.setRange(new Range()); -// -// for (Map.Entry entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getValue().get()))); -// } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new 
Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - - TupleExpr te = getTupleExpr(q2); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - Assert.assertEquals(2, ccv.getCcNodes().size()); - - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeQ3SamePriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt"; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - String s11 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television"; - - - - - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); - m6.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - m7.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m11 = new Mutation(s11 + DELIM + "1"); - m11.put(new Text("count"), new Text(""), new Value("1".getBytes())); - - mList.add(m1); - mList.add(m2); - 
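// A minimal sketch, not from the deleted file: the helper name below is
// hypothetical, and it assumes the DELIM constant plus the Mutation,
// Text, and Value imports already used in this class. The repeated
// "count" idiom above (row = key + DELIM + "1", column family "count",
// empty qualifier, cardinality stored as the cell value) could be
// written once as:
//
//     private static Mutation countMutation(String rowPrefix, int cardinality) {
//         // Prospects rows end in DELIM + "1"; the cardinality is the
//         // value of the ("count", "") column.
//         Mutation m = new Mutation(rowPrefix + DELIM + "1");
//         m.put(new Text("count"), new Text(""),
//                 new Value(Integer.toString(cardinality).getBytes()));
//         return m;
//     }
//
//     // usage: mList.add(countMutation(s1, 1));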
mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - mList.add(m11); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = new Mutation(s11); - m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m7.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m8.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m9.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m10.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m11.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - mList2.add(m12); - bw2.addMutations(mList2); - bw2.close(); - - - - TupleExpr te = getTupleExpr(q3); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - Assert.assertEquals(3, ccv.getCcNodes().size()); - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeQ2DiffPriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicate" + DELIM + "uri:peesOn"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new 
Value("3".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - - TupleExpr te = getTupleExpr(q5); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - - Assert.assertEquals(2, nodes.size()); - - for(QueryModelNode q: nodes) { - - if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 2) { - Assert.assertEquals("h", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("m", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - - - - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeQ2DiffPriority2() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicate" + DELIM + "uri:peesOn"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("2".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new 
Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - - TupleExpr te = getTupleExpr(q5); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - - Assert.assertEquals(2, nodes.size()); - - - for(QueryModelNode q: nodes) { - - if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 2) { - Assert.assertEquals("m", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("h", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeQ6DiffPriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicate" + DELIM + "uri:smells" ; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - String s11 = "predicate" + DELIM + "uri:watches"; - - - - - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject","objectsubject", "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); 
- m6.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - m7.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m11 = new Mutation(s11 + DELIM + "1"); - m11.put(new Text("count"), new Text(""), new Value("2".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - mList.add(m11); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = new Mutation(s11); - m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m7.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m8.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m9.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m10.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m11.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - mList2.add(m12); - bw2.addMutations(mList2); - bw2.close(); - - - - TupleExpr te = getTupleExpr(q6); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - - Assert.assertEquals(3, nodes.size()); - List cVarList = Lists.newArrayList(); - cVarList.add("i"); - cVarList.add("m"); - - for(QueryModelNode q: nodes) { - - if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 2) { - String s = ((EntityTupleSet)q).getStarQuery().getCommonVarName(); - System.out.println("node is " + q + " and common var is " + s); - System.out.println("star query is " + ((EntityTupleSet)q).getStarQuery()); - Assert.assertTrue(cVarList.contains(s)); - cVarList.remove(s); - } else if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("h", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - - System.out.println(te); - - } - - - - - - - - - - - - @Test - public void testOptimizeConstantPriority() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - 
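// Every test in this class repeats the same skeleton; a condensed sketch,
// grounded in the code in this file ("expectedNodeCount" is a placeholder
// for the per-test assertion value, not an identifier from the source):
//
//     AccumuloSelectivityEvalDAO dao = new AccumuloSelectivityEvalDAO();
//     dao.setConf(conf);
//     dao.setConnector(accCon);
//     dao.setRdfEvalDAO(res);
//     dao.init();
//
//     TupleExpr te = getTupleExpr(query);                 // parse the SPARQL string
//     new EntityOptimizer(dao).optimize(te, null, null);  // rewrite star joins
//
//     EntityCentricVisitor visitor = new EntityCentricVisitor();
//     te.visit(visitor);                                  // collect EntityTupleSet nodes
//     Assert.assertEquals(expectedNodeCount, visitor.getCcNodes().size());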
accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:chickens"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:chickens"; - String s3 = "predicate" + DELIM + "uri:peesOn"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", - "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("2".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - TupleExpr te = getTupleExpr(q7); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - System.out.println("Test 7 nodes are :" + nodes); - - Assert.assertEquals(2, nodes.size()); - - for (QueryModelNode q : nodes) { - - if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 2) { - Assert.assertEquals("m", ((EntityTupleSet) q).getStarQuery().getCommonVarName()); - } else if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("uri:chickens", ((EntityTupleSet) q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeFilters() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = 
accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:chickens"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:chickens"; - String s3 = "predicate" + DELIM + "uri:peesOn"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", - "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("2".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - TupleExpr te = getTupleExpr(q8); - (new FilterOptimizer()).optimize(te,null,null); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - System.out.println("Test 8 nodes are :" + nodes); - - Assert.assertEquals(2, nodes.size()); - - for (QueryModelNode q : nodes) { - - if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 2) { - Assert.assertEquals("m", ((EntityTupleSet) q).getStarQuery().getCommonVarName()); - } else if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("uri:chickens", ((EntityTupleSet) q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - System.out.println(te); - - } - - - - - - - @Test - public void testOptimizeFilter2() throws Exception { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(conf); - accc.setConnector(accCon); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + 
"uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicate" + DELIM + "uri:smells" ; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - String s11 = "predicate" + DELIM + "uri:watches"; - - - - - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject","objectsubject", "objectpredicate", "objectobject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "1"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); - m6.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - m7.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m11 = new Mutation(s11 + DELIM + "1"); - m11.put(new Text("count"), new Text(""), new Value("2".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - mList.add(m11); - - bw1.addMutations(mList); - bw1.close(); - - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = new Mutation(s11); - m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - - - for (String s : sList) { - - m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL); - m7.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m8.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m9.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - m10.put(new Text(s), new 
Text(Integer.toString(1)), EMPTY_VAL); - m11.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL); - - - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - mList2.add(m12); - bw2.addMutations(mList2); - bw2.close(); - - - - TupleExpr te = getTupleExpr(q9); - System.out.println(te); - (new FilterOptimizer()).optimize(te,null,null); - - EntityOptimizer cco = new EntityOptimizer(accc); - System.out.println("Originial query is " + te); - cco.optimize(te, null, null); - - EntityCentricVisitor ccv = new EntityCentricVisitor(); - te.visit(ccv); - - List nodes = Lists.newArrayList(ccv.getCcNodes()); - - Assert.assertEquals(3, nodes.size()); - List cVarList = Lists.newArrayList(); - cVarList.add("i"); - cVarList.add("m"); - - for(QueryModelNode q: nodes) { - - if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 2) { - String s = ((EntityTupleSet)q).getStarQuery().getCommonVarName(); - System.out.println("node is " + q + " and common var is " + s); - System.out.println("star query is " + ((EntityTupleSet)q).getStarQuery()); - Assert.assertTrue(cVarList.contains(s)); - cVarList.remove(s); - } else if(((EntityTupleSet)q).getStarQuery().getNodes().size() == 3) { - Assert.assertEquals("h", ((EntityTupleSet)q).getStarQuery().getCommonVarName()); - } else { - Assert.assertTrue(false); - } - } - - - System.out.println(te); - - } - - - - - - - - - - - - - - - private TupleExpr getTupleExpr(String query) throws MalformedQueryException { - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(query, null); - - return pq.getTupleExpr(); - } - - - - - - - private class EntityCentricVisitor extends QueryModelVisitorBase { - - private Set ccNodes = Sets.newHashSet(); - - public Set getCcNodes() { - return ccNodes; - } - - - public void meetNode(QueryModelNode node) { - - if(node instanceof EntityTupleSet) { - ccNodes.add(node); - } - - super.meetNode(node); - } - - - - - } - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java deleted file mode 100644 index 0da484a13..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java +++ /dev/null @@ -1,290 +0,0 @@ -package mvm.rya.indexing.accumulo.entity; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import static org.junit.Assert.*; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.accumulo.documentIndex.TextColumn; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTypeResolverException; -import mvm.rya.indexing.accumulo.entity.StarQuery; - -import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Test; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Sets; -import com.google.common.primitives.Bytes; - -public class StarQueryTest { - - ValueFactory vf = new ValueFactoryImpl(); - - - @Test - public void testBasicFunctionality() { - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "GRAPH { " // - + "?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}" // - + "}"; - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - try { - pq1 = parser.parseQuery(q1, null); - } catch (MalformedQueryException e) { - e.printStackTrace(); - } - - TupleExpr te1 = pq1.getTupleExpr(); - - System.out.println(te1); - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - - StarQuery sq1 = new StarQuery(spList1); - - Var v = sq1.getCommonVar(); - - Assert.assertEquals("X", v.getName()); - Assert.assertEquals(null, v.getValue()); - Assert.assertEquals(v.getValue(), sq1.getCommonVarValue()); - Assert.assertTrue(!sq1.commonVarHasValue()); - Assert.assertEquals("X", sq1.getCommonVarName()); - Assert.assertTrue(sq1.isCommonVarURI()); - - Assert.assertTrue(sq1.hasContext()); - Assert.assertEquals("http://joe", sq1.getContextURI()); - - TextColumn[] cond = sq1.getColumnCond(); - - for(int i = 0; i < cond.length; i++ ) { - - Assert.assertEquals(cond[i].getColumnFamily().toString(), "uri:cf" + (i+1)); - Assert.assertEquals(cond[i].getColumnQualifier().toString(), "object"); - - } - - Set unCommonVars = Sets.newHashSet(); - unCommonVars.add("Y1"); - unCommonVars.add("Y2"); - unCommonVars.add("Y3"); - Assert.assertEquals(unCommonVars, sq1.getUnCommonVars()); - - Map varPos = sq1.getVarPos(); - - Assert.assertEquals(0, varPos.get("Y1").intValue()); - Assert.assertEquals(1, varPos.get("Y2").intValue()); - Assert.assertEquals(2, varPos.get("Y3").intValue()); - - QueryBindingSet bs1 = new QueryBindingSet(); - QueryBindingSet bs2 = new QueryBindingSet(); - - Value v1 = vf.createURI("uri:hank"); - Value v2 = vf.createURI("uri:bob"); - - bs1.addBinding("X",v1); - bs2.addBinding("X", v1); - bs2.addBinding("Y3", v2); - - Set s1 = StarQuery.getCommonVars(sq1, bs1); - Set s2 = StarQuery.getCommonVars(sq1, bs2); - - Set s3 = Sets.newHashSet(); - Set s4 = Sets.newHashSet(); - s3.add("X"); - s4.add("X"); - s4.add("Y3"); - - - Assert.assertEquals(s1, s3); - Assert.assertEquals(s2, s4); - - - - } - - - - - - - - - - @Test - public void testGetContrainedQuery() { - - String q1 = "" // - + "SELECT ?X ?Y1 ?Y2 " // - + "{"// - + "GRAPH { " // - + 
"?X ?Y1 ."// - + "?X ?Y2 ."// - + "?X ?Y3 ."// - + "}" // - + "}"; - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - try { - pq1 = parser.parseQuery(q1, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - TupleExpr te1 = pq1.getTupleExpr(); - - System.out.println(te1); - List spList1 = StatementPatternCollector.process(te1); - - StarQuery sq1 = new StarQuery(spList1); - - QueryBindingSet bs1 = new QueryBindingSet(); - QueryBindingSet bs2 = new QueryBindingSet(); - - Value v1 = vf.createURI("uri:hank"); - Value v2 = vf.createURI("uri:bob"); - - bs1.addBinding("X",v1); - bs2.addBinding("X", v1); - bs2.addBinding("Y3", v2); - - StarQuery sq2 = StarQuery.getConstrainedStarQuery(sq1, bs1); - StarQuery sq3 = StarQuery.getConstrainedStarQuery(sq1, bs2); - - Assert.assertTrue(sq2.commonVarHasValue()); - Assert.assertEquals(sq2.getCommonVarValue(), "uri:hank"); - - Assert.assertTrue(sq3.commonVarHasValue()); - Assert.assertEquals(sq3.getCommonVarValue(), "uri:hank"); - - - TextColumn[] tc1 = sq1.getColumnCond(); - TextColumn[] tc2 = sq2.getColumnCond(); - TextColumn[] tc3 = sq3.getColumnCond(); - - for(int i = 0; i < tc1.length; i++) { - - Assert.assertTrue(tc1[i].equals(tc2[i])); - if(i != 2) { - Assert.assertTrue(tc1[i].equals(tc3[i])); - } else { - Assert.assertEquals(tc3[i].getColumnFamily(), new Text("uri:cf3")); - RyaType objType = RdfToRyaConversions.convertValue(v2); - byte[][] b1 = null; - try { - b1 = RyaContext.getInstance().serializeType(objType); - } catch (RyaTypeResolverException e) { - e.printStackTrace(); - } - byte[] b2 = Bytes.concat("object".getBytes(), - "\u0000".getBytes(), b1[0], b1[1]); - Assert.assertEquals(tc3[i].getColumnQualifier(), new Text(b2)); - Assert.assertTrue(!tc3[i].isPrefix()); - } - } - - - - } - - - - - @Test - public void testConstantPriority() { - - String q1 = "" // - + "SELECT ?X " // - + "{"// - + "GRAPH { " // - + "?X ."// - + "?X ."// - + "?X ."// - + "}" // - + "}"; - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - try { - pq1 = parser.parseQuery(q1, null); - } catch (MalformedQueryException e) { - e.printStackTrace(); - } - - TupleExpr te1 = pq1.getTupleExpr(); - - System.out.println(te1); - List spList1 = StatementPatternCollector.process(te1); - - Assert.assertTrue(StarQuery.isValidStarQuery(spList1)); - - - StarQuery sq1 = new StarQuery(spList1); - Var v = sq1.getCommonVar(); - - Assert.assertEquals("uri:obj1",v.getValue().stringValue()); - - - - } - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java deleted file mode 100644 index a0a3a0324..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java +++ /dev/null @@ -1,221 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.HashSet; -import java.util.Map.Entry; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.hadoop.conf.Configuration; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.RDFS; - -import com.google.common.collect.Sets; - -public class AccumuloFreeTextIndexerTest { - private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints(); - - Configuration conf; - - @Before - public void before() throws Exception { - String tableName = "triplestore_freetext"; - String termTableName = "triplestore_freetext_term"; - conf = new Configuration(); - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true); - conf.set(ConfigUtils.CLOUDBASE_USER, "USERNAME"); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "PASS"); - conf.set(ConfigUtils.FREE_TEXT_DOC_TABLENAME, tableName); - conf.set(ConfigUtils.FREE_TEXT_TERM_TABLENAME, termTableName); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, "U"); - conf.setClass(ConfigUtils.TOKENIZER_CLASS, SimpleTokenizer.class, Tokenizer.class); - - createTable(conf, tableName); - createTable(conf, termTableName); - } - - private static void createTable(Configuration conf, String tablename) throws AccumuloException, AccumuloSecurityException, - TableNotFoundException, TableExistsException { - TableOperations tableOps = ConfigUtils.getConnector(conf).tableOperations(); - if (tableOps.exists(tablename)) { - tableOps.delete(tablename); - } - tableOps.create(tablename); - } - - @Test - public void testSearch() throws Exception { - - AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - - URI subject = new URIImpl("foo:subj"); - URI predicate = RDFS.LABEL; - Value object = vf.createLiteral("this is a new hat"); - - URI context = new URIImpl("foo:context"); - - Statement statement = vf.createStatement(subject, predicate, object, context); - f.storeStatement(RdfToRyaConversions.convertStatement(statement)); - f.flush(); - - printTables(conf); - - Assert.assertEquals(Sets.newHashSet(), 
getSet(f.queryText("asdf", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & !is", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("is", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("a", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("new", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("ha*", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("*at", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat & new", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this & hat & new", EMPTY_CONSTRAINTS))); - - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("bat", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & bat", EMPTY_CONSTRAINTS))); - - f.close(); - } - - @Test - public void testRestrictPredicatesSearch() throws Exception { - conf.setStrings(ConfigUtils.FREETEXT_PREDICATES_LIST, "pred:1,pred:2"); - - AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer(); - f.setConf(conf); - - // These should not be stored because they are not in the predicate list - f.storeStatement(new RyaStatement(new RyaURI("foo:subj1"), new RyaURI(RDFS.LABEL.toString()), new RyaType("invalid"))); - f.storeStatement(new RyaStatement(new RyaURI("foo:subj2"), new RyaURI(RDFS.COMMENT.toString()), new RyaType("invalid"))); - - RyaURI pred1 = new RyaURI("pred:1"); - RyaURI pred2 = new RyaURI("pred:2"); - - // These should be stored because they are in the predicate list - RyaStatement s3 = new RyaStatement(new RyaURI("foo:subj3"), pred1, new RyaType("valid")); - RyaStatement s4 = new RyaStatement(new RyaURI("foo:subj4"), pred2, new RyaType("valid")); - f.storeStatement(s3); - f.storeStatement(s4); - - // This should not be stored because the object is not a literal - f.storeStatement(new RyaStatement(new RyaURI("foo:subj5"), pred1, new RyaURI("in:valid"))); - - f.flush(); - - printTables(conf); - - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("invalid", EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("in:valid", EMPTY_CONSTRAINTS))); - - Set actual = getSet(f.queryText("valid", EMPTY_CONSTRAINTS)); - Assert.assertEquals(2, actual.size()); - Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s3))); - Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s4))); - - f.close(); - } - - @Test - public void testContextSearch() throws Exception { - - AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - URI subject = new URIImpl("foo:subj"); - URI predicate = new URIImpl(RDFS.COMMENT.toString()); - Value object = vf.createLiteral("this is a new hat"); - URI context = new URIImpl("foo:context"); - - Statement statement = vf.createStatement(subject, predicate, object, context); - f.storeStatement(RdfToRyaConversions.convertStatement(statement)); - f.flush(); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS))); - 
Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementContraints().setContext(context)))); - Assert.assertEquals(Sets.newHashSet(), - getSet(f.queryText("hat", new StatementContraints().setContext(vf.createURI("foo:context2"))))); - - f.close(); - } - - public static void printTables(Configuration conf) throws AccumuloException, AccumuloSecurityException, TableNotFoundException { - TableOperations tops = ConfigUtils.getConnector(conf).tableOperations(); - - // print tables - String FORMAT = "%-20s %-20s %-40s %-40s\n"; - for (String table : tops.list()) { - System.out.println("Reading : " + table); - System.out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--"); - Scanner s = ConfigUtils.getConnector(conf).createScanner(table, Constants.NO_AUTHS); - for (Entry entry : s) { - Key k = entry.getKey(); - System.out.format(FORMAT, k.getRow(), k.getColumnFamily(), k.getColumnQualifier(), entry.getValue()); - } - System.out.println(); - } - - } - - private static Set getSet(CloseableIteration iter) throws Exception { - Set set = new HashSet(); - while (iter.hasNext()) { - set.add(iter.next()); - } - return set; - } -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java deleted file mode 100644 index 2097a02f6..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java +++ /dev/null @@ -1,130 +0,0 @@ -package mvm.rya.indexing.accumulo.freetext.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.ArrayList; -import java.util.List; - -import mvm.rya.indexing.accumulo.freetext.query.ASTExpression; -import mvm.rya.indexing.accumulo.freetext.query.ASTTerm; -import mvm.rya.indexing.accumulo.freetext.query.Node; -import mvm.rya.indexing.accumulo.freetext.query.ParseException; -import mvm.rya.indexing.accumulo.freetext.query.QueryParser; -import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError; - -import org.apache.commons.lang.StringUtils; -import org.junit.Assert; -import org.junit.Test; - -public class QueryParserTest { - - @Test - public void AssortmentTest() throws Exception { - runTest("a* or b", // - "([WILDTERM]a* OR [TERM]b)"); - - runTest("a and b", // - "([TERM]a AND [TERM]b)"); - - runTest("a b", // - "([TERM]a AND [TERM]b)"); - - runTest("a b c", // - "([TERM]a AND [TERM]b AND [TERM]c)"); - - runTest("(a and b)", // - "([TERM]a AND [TERM]b)"); - - runTest("(a and b) and c", // - "(([TERM]a AND [TERM]b) AND [TERM]c)"); - - runTest("alpha and beta or charlie and delta or (boo and par)", // - "(([TERM]alpha AND [TERM]beta) OR ([TERM]charlie AND [TERM]delta) OR ([TERM]boo AND [TERM]par))"); - - runTest("a and (b or c)", // - "([TERM]a AND ([TERM]b OR [TERM]c))"); - - runTest("not a and (b or c)", // - "(![TERM]a AND ([TERM]b OR [TERM]c))"); - - runTest("not a and not (b or c)", // - "(![TERM]a AND !([TERM]b OR [TERM]c))"); - - runTest("not a and not (b or \"c and d\")", // - "(![TERM]a AND !([TERM]b OR [QUOTED]\"c and d\"))"); - - runTest("((a and b) and c)", // - "(([TERM]a AND [TERM]b) AND [TERM]c)"); - - runTest("not(a and b)", // - "!([TERM]a AND [TERM]b)"); - - runTest("not(not(a and b))", // - "([TERM]a AND [TERM]b)"); - - runTest("(not(!a and b))", // - "!(![TERM]a AND [TERM]b)"); - - runTest("not(!a and b)", // - "!(![TERM]a AND [TERM]b)"); - - runTest("not a", // - "![TERM]a"); - - runTest("not(not a)", // - "[TERM]a"); - - runTest("(not(!A or B))", // - "!(![TERM]A OR [TERM]B)"); - - runTest("not \"!A\"", // - "![QUOTED]\"!A\""); -} - - private static void runTest(String query, String expected) throws ParseException, TokenMgrError { - Assert.assertEquals(expected, prettyPrint(QueryParser.parse(query))); - } - - public static String prettyPrint(Node s) { - if (s instanceof ASTTerm) { - ASTTerm a = (ASTTerm) s; - return (a.isNotFlag() ? "!" : "") + "[" + a.getType() + "]" + a.getTerm(); - } - - String prefix = ""; - String suffix = ""; - String join = " "; - if (s instanceof ASTExpression) { - ASTExpression a = (ASTExpression) s; - prefix = (a.isNotFlag() ? "!" : "") + "("; - suffix = ")"; - join = " " + a.getType() + " "; - } - - List children = new ArrayList(); - for (int i = 0; i < s.jjtGetNumChildren(); i++) { - children.add(prettyPrint(s.jjtGetChild(i))); - } - return prefix + StringUtils.join(children, join) + suffix; - - } -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java deleted file mode 100644 index 4c22857fe..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java +++ /dev/null @@ -1,316 +0,0 @@ -package mvm.rya.indexing.accumulo.geo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.UUID; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaToRdfConversions; -import mvm.rya.indexing.GeoIndexer; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.geo.GeoConstants; -import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer; - -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; -import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.geom.GeometryFactory; -import com.vividsolutions.jts.geom.LineString; -import com.vividsolutions.jts.geom.LinearRing; -import com.vividsolutions.jts.geom.Point; -import com.vividsolutions.jts.geom.Polygon; -import com.vividsolutions.jts.geom.PrecisionModel; -import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence; - -/** - * Tests all of the "simple functions" of the geoindexer. - */ -public class GeoIndexerSfTest { - private static Configuration conf; - private static GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 4326); - private static GeoMesaGeoIndexer g; - - private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints(); - - // Here is the landscape: - /** - *
-     * <pre>
-     * 	 +---+---+---+---+---+---+---+
-     * 	 |        F          |       |
-     * 	 +  A    +           +   C   +
-     * 	 |                   |       |
-     * 	 +---+---+    E      +---+---+
-     * 	 |       |   /       |
-     * 	 +   B   +  /+---+---+
-     * 	 |       | / |       |
-     * 	 +---+---+/--+---+---+
-     * 	         /   |     D |
-     * 	        /    +---+---+
-     * </pre>
- **/ - - private static final Polygon A = poly(bbox(0, 1, 4, 5)); - private static final Polygon B = poly(bbox(0, 1, 2, 3)); - private static final Polygon C = poly(bbox(4, 3, 6, 5)); - private static final Polygon D = poly(bbox(3, 0, 5, 2)); - - private static final Point F = point(2, 4); - - private static final LineString E = line(2, 0, 3, 3); - - private static final Map names = Maps.newHashMap(); - static { - names.put(A, "A"); - names.put(B, "B"); - names.put(C, "C"); - names.put(D, "D"); - names.put(E, "E"); - names.put(F, "F"); - } - - @Before - public void before() throws Exception { - System.out.println(UUID.randomUUID().toString()); - String tableName = "triplestore_geospacial"; - conf = new Configuration(); - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true); - conf.set(ConfigUtils.CLOUDBASE_USER, "USERNAME"); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "PASS"); - conf.set(ConfigUtils.GEO_TABLENAME, tableName); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, "U"); - - TableOperations tops = ConfigUtils.getConnector(conf).tableOperations(); - // get all of the table names with the prefix - Set toDel = Sets.newHashSet(); - for (String t : tops.list()) { - if (t.startsWith(tableName)) { - toDel.add(t); - } - } - for (String t : toDel) { - tops.delete(t); - } - - g = new GeoMesaGeoIndexer(); - g.setConf(conf); - g.storeStatement(statement(A)); - g.storeStatement(statement(B)); - g.storeStatement(statement(C)); - g.storeStatement(statement(D)); - g.storeStatement(statement(F)); - g.storeStatement(statement(E)); - } - - private static RyaStatement statement(Geometry geo) { - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("uri:" + names.get(geo)); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral(geo.toString(), GeoConstants.XMLSCHEMA_OGC_WKT); - return RdfToRyaConversions.convertStatement(new StatementImpl(subject, predicate, object)); - - } - - private static Point point(double x, double y) { - return gf.createPoint(new Coordinate(x, y)); - } - - private static LineString line(double x1, double y1, double x2, double y2) { - return new LineString(new PackedCoordinateSequence.Double(new double[] { x1, y1, x2, y2 }, 2), gf); - } - - private static Polygon poly(double[] arr) { - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(arr, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - return p1; - } - - private static double[] bbox(double x1, double y1, double x2, double y2) { - return new double[] { x1, y1, x1, y2, x2, y2, x2, y1, x1, y1 }; - } - - public void compare(CloseableIteration actual, Geometry... 
expected) throws Exception { - Set expectedSet = Sets.newHashSet(); - for (Geometry geo : expected) { - expectedSet.add(RyaToRdfConversions.convertStatement(statement(geo))); - } - - Assert.assertEquals(expectedSet, getSet(actual)); - } - - private static Set getSet(CloseableIteration iter) throws Exception { - Set set = new HashSet(); - while (iter.hasNext()) { - set.add(iter.next()); - } - return set; - } - - private static Geometry[] EMPTY_RESULTS = {}; - - @Test - public void testEquals() throws Exception { - // point - compare(g.queryEquals(F, EMPTY_CONSTRAINTS), F); - compare(g.queryEquals(point(2, 2), EMPTY_CONSTRAINTS), EMPTY_RESULTS); - - // line - compare(g.queryEquals(E, EMPTY_CONSTRAINTS), E); - compare(g.queryEquals(line(2, 2, 3, 3), EMPTY_CONSTRAINTS), EMPTY_RESULTS); - - // poly - compare(g.queryEquals(A, EMPTY_CONSTRAINTS), A); - compare(g.queryEquals(poly(bbox(1, 1, 4, 5)), EMPTY_CONSTRAINTS), EMPTY_RESULTS); - - } - - @Test - public void testDisjoint() throws Exception { - // point - compare(g.queryDisjoint(F, EMPTY_CONSTRAINTS), B, C, D, E); - - // line - compare(g.queryDisjoint(E, EMPTY_CONSTRAINTS), B, C, D, F); - - // poly - compare(g.queryDisjoint(A, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - compare(g.queryDisjoint(B, EMPTY_CONSTRAINTS), C, D, F, E); - } - - @Test - public void testIntersectsPoint() throws Exception { - // This seems like a bug - // compare(g.queryIntersects(F, EMPTY_CONSTRAINTS), A, F); - // compare(g.queryIntersects(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testIntersectsLine() throws Exception { - // This seems like a bug - // compare(g.queryIntersects(E, EMPTY_CONSTRAINTS), A, E); - // compare(g.queryIntersects(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testIntersectsPoly() throws Exception { - compare(g.queryIntersects(A, EMPTY_CONSTRAINTS), A, B, C, D, F, E); - } - - @Test - public void testTouchesPoint() throws Exception { - compare(g.queryTouches(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testTouchesLine() throws Exception { - compare(g.queryTouches(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testTouchesPoly() throws Exception { - compare(g.queryTouches(A, EMPTY_CONSTRAINTS), C); - } - - @Test - public void testCrossesPoint() throws Exception { - compare(g.queryCrosses(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testCrossesLine() throws Exception { - // compare(g.queryCrosses(E, EMPTY_CONSTRAINTS), A); - } - - @Test - public void testCrossesPoly() throws Exception { - compare(g.queryCrosses(A, EMPTY_CONSTRAINTS), E); - } - - @Test - public void testWithin() throws Exception { - // point - // compare(g.queryWithin(F, EMPTY_CONSTRAINTS), F); - - // line - // compare(g.queryWithin(E, EMPTY_CONSTRAINTS), E); - - // poly - compare(g.queryWithin(A, EMPTY_CONSTRAINTS), A, B, F); - } - - @Test - public void testContainsPoint() throws Exception { - compare(g.queryContains(F, EMPTY_CONSTRAINTS), A, F); - } - - @Test - public void testContainsLine() throws Exception { - // compare(g.queryContains(E, EMPTY_CONSTRAINTS), E); - } - - @Test - public void testContainsPoly() throws Exception { - compare(g.queryContains(A, EMPTY_CONSTRAINTS), A); - compare(g.queryContains(B, EMPTY_CONSTRAINTS), A, B); - } - - @Test - public void testOverlapsPoint() throws Exception { - // compare(g.queryOverlaps(F, EMPTY_CONSTRAINTS), F); - // You cannot have overlapping points - // compare(g.queryOverlaps(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - 
public void testOverlapsLine() throws Exception { - // compare(g.queryOverlaps(E, EMPTY_CONSTRAINTS), A, E); - // You cannot have overlapping lines - // compare(g.queryOverlaps(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS); - } - - @Test - public void testOverlapsPoly() throws Exception { - compare(g.queryOverlaps(A, EMPTY_CONSTRAINTS), D); - } - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java deleted file mode 100644 index 8ca96bcc3..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java +++ /dev/null @@ -1,370 +0,0 @@ -package mvm.rya.indexing.accumulo.geo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement; -import info.aduna.iteration.CloseableIteration; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; - -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -import com.google.common.collect.Sets; -import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.GeometryFactory; -import com.vividsolutions.jts.geom.LinearRing; -import com.vividsolutions.jts.geom.Point; -import com.vividsolutions.jts.geom.Polygon; -import com.vividsolutions.jts.geom.PrecisionModel; -import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence; - -public class GeoIndexerTest { - - private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints(); - - Configuration conf; - GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 4326); - - @Before - public void before() throws Exception { - System.out.println(UUID.randomUUID().toString()); - String tableName = "triplestore_geospacial"; - conf = new Configuration(); - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true); - conf.set(ConfigUtils.CLOUDBASE_USER, "USERNAME"); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "PASS"); - conf.set(ConfigUtils.GEO_TABLENAME, tableName); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, "U"); - - TableOperations tops = ConfigUtils.getConnector(conf).tableOperations(); - // get all of the table 
names with the prefix - Set toDel = Sets.newHashSet(); - for (String t : tops.list()){ - if (t.startsWith(tableName)){ - toDel.add(t); - } - } - for (String t : toDel) { - tops.delete(t); - } - } - - @Test - public void testRestrictPredicatesSearch() throws Exception { - conf.setStrings(ConfigUtils.GEO_PREDICATES_LIST, "pred:1,pred:2"); - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - - Point point = gf.createPoint(new Coordinate(10, 10)); - Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT); - URI invalidPredicate = GeoConstants.GEO_AS_WKT; - - // These should not be stored because they are not in the predicate list - f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue))); - f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue))); - - URI pred1 = vf.createURI("pred:1"); - URI pred2 = vf.createURI("pred:2"); - - // These should be stored because they are in the predicate list - Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue); - Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue); - f.storeStatement(convertStatement(s3)); - f.storeStatement(convertStatement(s4)); - - // This should not be stored because the object is not valid wkt - f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)")))); - - // This should not be stored because the object is not a literal - f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)")))); - - f.flush(); - - Set actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS)); - Assert.assertEquals(2, actual.size()); - Assert.assertTrue(actual.contains(s3)); - Assert.assertTrue(actual.contains(s4)); - - f.close(); - } - - private static Set getSet(CloseableIteration iter) throws Exception { - Set set = new HashSet(); - while (iter.hasNext()) { - set.add(iter.next()); - } - return set; - } - - @Test - public void testPrimeMeridianSearch() throws Exception { - - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 }; - double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 }; - double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 }; - - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2)); - LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2)); - LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2)); - - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - Polygon p2 = gf.createPolygon(r2, new LinearRing[] {}); - Polygon p3 = gf.createPolygon(r3, new LinearRing[] {}); - - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS))); - Assert.assertEquals(Sets.newHashSet(statement), 
getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS))); - - // Test a ring with a hole in it - Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 }); - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS))); - - // test a ring outside the point - double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 }; - LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2)); - Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {}); - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS))); - - f.close(); - } - - @Test - public void testDcSearch() throws Exception { - // test a ring around dc - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 }; - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS))); - - // test a ring outside the point - double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 }; - LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2)); - Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {}); - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS))); - - f.close(); - } - - @Test - public void testDcSearchWithContext() throws Exception { - // test a ring around dc - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 }; - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - - // query with correct context - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context)))); - - // query with wrong context - Assert.assertEquals(Sets.newHashSet(), - getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2"))))); - - f.close(); - } - - @Test - public void testDcSearchWithSubject() throws Exception { - // test a ring around dc - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - 
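// [Editor's sketch, not part of the original patch] The object above is a
// Well-Known Text (WKT) literal typed as GeoConstants.XMLSCHEMA_OGC_WKT; the
// indexer parses it into a JTS geometry. The same point built directly with the
// JTS classes this test already imports (hypothetical aside, not a test step):
//   Point dc = gf.createPoint(new Coordinate(-77.03524, 38.889468));
//   // dc.toText() should round-trip as "POINT (-77.03524 38.889468)"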
f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 }; - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - - // query with correct subject - Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject)))); - - // query with wrong subject - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2"))))); - - f.close(); - } - - @Test - public void testDcSearchWithSubjectAndContext() throws Exception { - // test a ring around dc - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 }; - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - - // query with correct context subject - Assert.assertEquals(Sets.newHashSet(statement), - getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject)))); - - // query with wrong context - Assert.assertEquals(Sets.newHashSet(), - getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2"))))); - - // query with wrong subject - Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2"))))); - - f.close(); - } - - @Test - public void testDcSearchWithPredicate() throws Exception { - // test a ring around dc - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource subject = vf.createURI("foo:subj"); - URI predicate = GeoConstants.GEO_AS_WKT; - Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT); - Resource context = vf.createURI("foo:context"); - - Statement statement = new ContextStatementImpl(subject, predicate, object, context); - f.storeStatement(convertStatement(statement)); - f.flush(); - - double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 }; - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2)); - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - - // query with correct Predicate - Assert.assertEquals(Sets.newHashSet(statement), - getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate))))); - - // query with wrong predicate - Assert.assertEquals(Sets.newHashSet(), - getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred")))))); - - f.close(); - } - - // @Test - public void testAntiMeridianSearch() throws Exception { - // verify that a search works if the bounding box crosses the anti meridian - GeoMesaGeoIndexer f = new GeoMesaGeoIndexer(); - f.setConf(conf); - - ValueFactory vf = new ValueFactoryImpl(); - Resource context = vf.createURI("foo:context"); - - Resource subjectEast = 
vf.createURI("foo:subj:east"); - URI predicateEast = GeoConstants.GEO_AS_WKT; - Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT); - Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context); - f.storeStatement(convertStatement(statementEast)); - - Resource subjectWest = vf.createURI("foo:subj:west"); - URI predicateWest = GeoConstants.GEO_AS_WKT; - Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT); - Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context); - f.storeStatement(convertStatement(statementWest)); - - f.flush(); - - double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 }; - - LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2)); - - Polygon p1 = gf.createPolygon(r1, new LinearRing[] {}); - - Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS))); - - f.close(); - } -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java deleted file mode 100644 index 60d237d07..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java +++ /dev/null @@ -1,1040 +0,0 @@ -/** - * - */ -package mvm.rya.indexing.accumulo.temporal; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import info.aduna.iteration.CloseableIteration; - -import java.io.IOException; -import java.io.PrintStream; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; - -import junit.framework.Assert; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.indexing.StatementContraints; -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.StatementSerializer; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.commons.codec.binary.StringUtils; -import org.apache.commons.io.output.NullOutputStream; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.QueryEvaluationException; - -import com.beust.jcommander.internal.Lists; - -/** - * JUnit tests for TemporalIndexer and its implementation AccumuloTemporalIndexer - * - * If you enjoy this test, please read RyaTemporalIndexerTest and YagoKBTest, which contain - * many example SPARQL queries and updates and attempts to test independently of Accumulo: - * - * extras/indexingSail/src/test/java/mvm/rya/indexing/accumulo/RyaTemporalIndexerTest.java - * {@link mvm.rya.indexing.accumulo.RyaTemporalIndexerTest} - * {@link mvm.rya.indexing.accumulo.YagoKBTest.java} - * - * Remember, this class is instantiated fresh for each @Test method, - * so fields are reset, unless they are static. - * - * These are covered: - * Instant {before, equals, after} given Instant - * Instant {before, after, inside} given Interval - * Instant {hasBeginning, hasEnd} given Interval - * And a few more. - * - */ -public final class AccumuloTemporalIndexerTest { - // Configuration properties; reset per test in setUp(). - Configuration conf; - // temporal indexer under test; created for each test method by setUp(). 
- AccumuloTemporalIndexer tIndexer; - - private static final String URI_PROPERTY_EVENT_TIME = "Property:event:time"; - private static final String URI_PROPERTY_CIRCA = "Property:circa"; - private static final String URI_PROPERTY_AT_TIME = "Property:atTime"; - private static final String STAT_COUNT = "count"; - private static final String STAT_KEYHASH = "keyhash"; - private static final String STAT_VALUEHASH = "valuehash"; - private static final String TEST_TEMPORAL_INDEX_TABLE_NAME = "testTemporalIndex"; - private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints(); - - // Recreate table name for each test instance in this JVM. - String uniquePerTestTemporalIndexTableName = TEST_TEMPORAL_INDEX_TABLE_NAME + String.format("%05d", nextTableSuffixAtomic.getAndIncrement()); - // start at 0; for uniqueness between JVMs, consider AtomicLong(new Random().nextLong()) - private static final AtomicLong nextTableSuffixAtomic = new AtomicLong(); - - // Assigned once in the static block below; each test stores the ones it needs. - // setUp() deletes the table before each test. - static final Statement spo_B00_E01; - static final Statement spo_B03_E20; - static final Statement spo_B02_E29; - static final Statement spo_B02_E30; - static final Statement spo_B02_E40; - static final Statement spo_B02_E31; - static final Statement spo_B29_E30; - static final Statement spo_B30_E32; - - // Instants: - static final Statement spo_B02; - static final int SERIES_OF_SECONDS = 41; - static final Statement seriesSpo[] = new Statement[SERIES_OF_SECONDS]; - - // These are shared for several tests. Only the seconds are different. - // tvB03_E20 read as: interval Begins 3 seconds, ends at 20 seconds - static final TemporalInterval tvB00_E01 = new TemporalInterval(// - makeInstant(00), // - makeInstant(01)); - static final TemporalInterval tvB29_E30 = new TemporalInterval(// - makeInstant(29), // - makeInstant(30)); - static final TemporalInterval tvB30_E32 = new TemporalInterval(// - makeInstant(30), // - makeInstant(32)); - static final TemporalInterval tvB03_E20 = new TemporalInterval(// - makeInstant(03), // - makeInstant(20)); - // 30 seconds, Begins earlier, ends later - static final TemporalInterval tvB02_E30 = new TemporalInterval(// - makeInstant(02), // - makeInstant(30)); - // use for interval after - static final TemporalInterval tvB02_E29 = new TemporalInterval(// - makeInstant(02), // - makeInstant(29)); - // same as above, but ends in the middle - static final TemporalInterval tvB02_E31 = new TemporalInterval(// - makeInstant(02), // - makeInstant(31)); - // same as above, but ends even later - static final TemporalInterval tvB02_E40 = new TemporalInterval(// - makeInstant(02), // - makeInstant(40)); - // instant, match beginnings of several above, before tiB03_E20 - static final TemporalInstant tsB02 = makeInstant(02); - // instant, after all above - static final TemporalInstant tsB04 = makeInstant(04); - - // Create a series of instants about times 0 - 40 seconds - static final TemporalInstant seriesTs[]; - static { - seriesTs = new TemporalInstant[SERIES_OF_SECONDS]; - for (int i = 0; i <= 40; i++) - seriesTs[i] = makeInstant(i); - }; - - /** - * Make a uniform instant with the given seconds. - */ - static TemporalInstant makeInstant(int secondsMakeMeUnique) { - return new TemporalInstantRfc3339(2015, 12, 30, 12, 00, secondsMakeMeUnique); - } - - static { - // Setup the statements only once. Each test will store some of these in their own index table. 
- ValueFactory vf = new ValueFactoryImpl(); - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - // tiB03_E20 read as: time interval that Begins 3 seconds, ends at 20 seconds, - // Each time element the same, except seconds. year, month, .... minute are the same for each statement below. - spo_B00_E01 = new StatementImpl(vf.createURI("foo:event0"), pred1_atTime, vf.createLiteral(tvB00_E01.toString())); - spo_B02_E29 = new StatementImpl(vf.createURI("foo:event2"), pred1_atTime, vf.createLiteral(tvB02_E29.toString())); - spo_B02_E30 = new StatementImpl(vf.createURI("foo:event2"), pred1_atTime, vf.createLiteral(tvB02_E30.toString())); - spo_B02_E31 = new StatementImpl(vf.createURI("foo:event3"), pred1_atTime, vf.createLiteral(tvB02_E31.toString())); - spo_B02_E40 = new StatementImpl(vf.createURI("foo:event4"), pred1_atTime, vf.createLiteral(tvB02_E40.toString())); - spo_B03_E20 = new StatementImpl(vf.createURI("foo:event5"), pred1_atTime, vf.createLiteral(tvB03_E20.toString())); - spo_B29_E30 = new StatementImpl(vf.createURI("foo:event1"), pred1_atTime, vf.createLiteral(tvB29_E30.toString())); - spo_B30_E32 = new StatementImpl(vf.createURI("foo:event1"), pred1_atTime, vf.createLiteral(tvB30_E32.toString())); - spo_B02 = new StatementImpl(vf.createURI("foo:event6"), pred1_atTime, vf.createLiteral(tsB02.getAsReadable())); - - // Create statements about time instants 0 - 40 seconds - for (int i = 0; i < seriesTs.length; i++) { - seriesSpo[i] = new StatementImpl(vf.createURI("foo:event0" + i), pred1_atTime, vf.createLiteral(seriesTs[i].getAsReadable())); - } - - } - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - } - - /** - * Create a table for test after deleting it. - */ - private static void createTable(Configuration conf, String tablename) - throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException { - TableOperations tableOps = ConfigUtils.getConnector(conf).tableOperations(); - if (tableOps.exists(tablename)) { - tableOps.delete(tablename); - } - tableOps.create(tablename); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - conf = new Configuration(); - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true); - conf.set(ConfigUtils.TEMPORAL_TABLENAME, uniquePerTestTemporalIndexTableName); - // This is from http://linkedevents.org/ontology - // and http://motools.sourceforge.net/event/event.html - conf.setStrings(ConfigUtils.TEMPORAL_PREDICATES_LIST, "" - + URI_PROPERTY_AT_TIME + "," - + URI_PROPERTY_CIRCA + "," - + URI_PROPERTY_EVENT_TIME); - - // delete and create table - createTable(conf, uniquePerTestTemporalIndexTableName); - tIndexer = new AccumuloTemporalIndexer(); - tIndexer.setConf(conf); - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - tIndexer.close(); - TableOperations tableOps = ConfigUtils.getConnector(conf).tableOperations(); - - if (tableOps.exists(uniquePerTestTemporalIndexTableName)) - tableOps.delete(uniquePerTestTemporalIndexTableName); - } - - /** - * Test method for {@link AccumuloTemporalIndexer#TemporalIndexerImpl(org.apache.hadoop.conf.Configuration)} . 
- * - * @throws TableExistsException - * @throws TableNotFoundException - * @throws AccumuloSecurityException - * @throws AccumuloException - * @throws IOException - */ - @Test - public void testTemporalIndexerImpl() - throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, IOException { - assertNotNull("Constructed.", tIndexer.toString()); - } - - /** - * Test method for {@link AccumuloTemporalIndexer#storeStatement(convertStatement(org.openrdf.model.Statement)} - * - * @throws NoSuchAlgorithmException - */ - @Test - public void testStoreStatement() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException { - // count rows expected to store: - int rowsStoredExpected = 0; - - ValueFactory vf = new ValueFactoryImpl(); - - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA); - - // Should not be stored because it is not in the predicate list - String validDateStringWithThirteens = "1313-12-13T13:13:13Z"; - tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), RDFS.LABEL, vf.createLiteral(validDateStringWithThirteens)))); - - // Test: Should not store an improper date, and log a warning (log warning not tested). - final String invalidDateString = "ThisIsAnInvalidDate"; -// // Silently logs a warning for bad dates. Old: Set true when we catch the error: -// boolean catchErrorThrownCorrectly = false; -// try { - tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(invalidDateString)))); -// } catch (IllegalArgumentException e) { -// catchErrorThrownCorrectly = true; -// Assert.assertTrue( -// "Invalid date parse error should include the invalid string. message=" + e.getMessage(), -// e.getMessage().contains(invalidDateString)); -// } -// Assert.assertTrue("Invalid date parse error should be thrown for this bad date=" + invalidDateString, catchErrorThrownCorrectly); - - // These are two different datetime instants, each written in a non-UTC time zone. - // This is an arbitrary zone, BRST=Brazil, better if not local. - // same as "2015-01-01T01:59:59Z" - final String testDate2014InBRST = "2014-12-31T23:59:59-02:00"; - // next year, same as "2017-01-01T01:59:59Z" - final String testDate2016InET = "2016-12-31T20:59:59-05:00"; - - // These should be stored because they are in the predicate list. - // Each will be converted to its exact UTC equivalent when stored. 
- Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1_atTime, vf.createLiteral(testDate2014InBRST)); - Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2_circa, vf.createLiteral(testDate2016InET)); - tIndexer.storeStatement(convertStatement(s3)); - rowsStoredExpected++; - tIndexer.storeStatement(convertStatement(s4)); - rowsStoredExpected++; - - // This should not be stored because the object is not a literal - tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1_atTime, vf.createURI("in:valid")))); - - tIndexer.flush(); - - int rowsStoredActual = printTables("junit testing: Temporal entities stored in testStoreStatement", null, null); - Assert.assertEquals("Number of rows stored.", rowsStoredExpected*4, rowsStoredActual); // 4 index entries per statement - - } - - @Test - public void testStoreStatementWithInterestingLiterals() throws Exception { - ValueFactory vf = new ValueFactoryImpl(); - - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - - tIndexer.storeStatement(convertStatement(new StatementImpl( - vf.createURI("foo:subj2"), - pred1_atTime, - vf.createLiteral("A number of organizations located, gathered, or classed together. [Derived from Concise Oxford English Dictionary, 11th Edition, 2008]")))); - - int rowsStoredActual = printTables("junit testing: Temporal entities stored in testStoreStatement", null, null); - Assert.assertEquals("Number of rows stored.", 0, rowsStoredActual); // no index entries: the literal is not a date or interval - } - - /** - * Test method for {@link AccumuloTemporalIndexer#storeStatement(convertStatement(org.openrdf.model.Statement)} - * - * @throws NoSuchAlgorithmException - */ - @Test - public void testStoreStatementBadInterval() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException { - // count rows expected to store: - int rowsStoredExpected = 0; - - ValueFactory vf = new ValueFactoryImpl(); - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - - // Test: Should not store an improper date interval, and log a warning (log warning not tested). - final String invalidDateIntervalString="[bad,interval]"; - // Silently logs a warning for bad dates. - tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), pred1_atTime, vf.createLiteral(invalidDateIntervalString)))); - - final String validDateIntervalString="[2016-12-31T20:59:59-05:00,2016-12-31T21:00:00-05:00]"; - tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(validDateIntervalString)))); - rowsStoredExpected++; - - tIndexer.flush(); - - int rowsStoredActual = printTables("junit testing: Temporal intervals stored in testStoreStatement", null, null); - Assert.assertEquals("Only good intervals should be stored.", rowsStoredExpected*2, rowsStoredActual); // 2 index entries per interval statement - } - - @Test - public void testStoreStatementsSameTime() throws IOException, NoSuchAlgorithmException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException - { - ValueFactory vf = new ValueFactoryImpl(); - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA); - - // These are the same datetime instant but from different time - // zones. - // This is an arbitrary zone, BRST=Brazil, better if not local. 
- final String ZONETestDateInBRST = "2014-12-31T23:59:59-02:00"; - final String ZONETestDateInZulu = "2015-01-01T01:59:59Z"; - final String ZONETestDateInET = "2014-12-31T20:59:59-05:00"; - - // These all should be stored because they are in the predicate list. - // BUT they will get converted to the same exact datetime in UTC. - // So we have to make the key distinct! Good luck indexer! - Statement s1 = new StatementImpl(vf.createURI("foo:subj1"), pred2_circa, vf.createLiteral(ZONETestDateInET)); - Statement s2 = new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(ZONETestDateInZulu)); - Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1_atTime, vf.createLiteral(ZONETestDateInBRST)); - int rowsStoredExpected = 0; - tIndexer.storeStatement(convertStatement(s1)); - rowsStoredExpected++; - tIndexer.storeStatement(convertStatement(s2)); - rowsStoredExpected++; - tIndexer.storeStatement(convertStatement(s3)); - rowsStoredExpected++; - int rowsStoredActual = printTables("junit testing: Duplicate times stored", null /*System.out*/, null); - Assert.assertEquals("Number of Duplicate times stored, 1 means duplicates not handled correctly.", rowsStoredExpected*4, rowsStoredActual); - } - - /** - * Test method for {@link AccumuloTemporalIndexer#storeStatements(java.util.Collection)} . - * - * @throws TableExistsException - * @throws TableNotFoundException - * @throws AccumuloSecurityException - * @throws AccumuloException - * @throws IOException - * @throws IllegalArgumentException - * @throws NoSuchAlgorithmException - */ - @Test - public void testStoreStatements() - throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, IllegalArgumentException, IOException, - NoSuchAlgorithmException { - long valueHash = 0; - Collection statements = new ArrayList(70); - statements.addAll(Arrays.asList(seriesSpo)); - int rowsStoredExpected = statements.size()*4; // instants store 4 each - // hash the expected output: - for (Statement statement : statements) { - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement))); - } - statements.add(spo_B02_E30); - rowsStoredExpected += 2; // intervals store two dates - statements.add(spo_B30_E32); - rowsStoredExpected += 2; // intervals store two dates - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(spo_B02_E30))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(spo_B02_E30))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(spo_B30_E32))); - valueHash = hasher(valueHash, StringUtils.getBytesUtf8(StatementSerializer.writeStatement(spo_B30_E32))); - // duplicates will overwrite old ones, no change in the output except timestamps - statements.add(spo_B30_E32); - statements.add(spo_B30_E32); - - List ryaStatements = Lists.newArrayList(); - for (Statement s : statements){ ryaStatements.add(convertStatement(s));} - tIndexer.storeStatements(ryaStatements); - - Map statistics = new HashMap(); - int rowsStoredActual = printTables("junit testing: StoreStatements multiple statements", null, statistics); - 
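// [Editor's note, not part of the original patch] hasher(...) is defined
// elsewhere in this file; testSelfTestHashMethod() below pins down its contract:
// the checksum must ignore byte order yet still distinguish the two byte
// multisets in that test (a plain sum collides there; the comment says an
// XOR-style fold passes). One hypothetical shape satisfying both assertions:
//   static long hasher(long accumulator, byte[] list) {
//       long arrayHash = 0;
//       for (byte b : list) {
//           arrayHash += (b + 1L) * (b + 1L); // nonlinear, order-independent
//       }
//       return accumulator ^ arrayHash;       // fold each array in, order-free
//   }
// Accumulo scan order therefore cannot affect the value-hash assertion below.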
Assert.assertEquals("Number of rows stored.", rowsStoredExpected, rowsStoredActual); // 4 index entries per statement - Assert.assertEquals("value hash.", valueHash, statistics.get(STAT_VALUEHASH).longValue()); - } - - /** - * test this classe's hash method to check un-ordered results. - */ - @Test - public void testSelfTestHashMethod() { - // self test on the hash method: - long hash01dup1 = hasher(0, new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); - long hash01dup2 = hasher(0, new byte[] { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }); - Assert.assertEquals("same numbers, different sequence, hash should be the same.", hash01dup1, hash01dup2); - - // this one fails for sum hash, passes for XOR - long hash02dup1 = hasher(0, new byte[] { 123, 2, 1, 1 }); - hash02dup1 = hasher(hash02dup1, new byte[] { 123, 1, 1, 2 }); - long hash02dup2 = hasher(0, new byte[] { 123, 1, 1, 2 }); - hash02dup2 = hasher(hash02dup2, new byte[] { 123, 1, 3, 0, 0 }); - Assert.assertTrue("Different numbers, should be different hashes: " + hash02dup1 + " != " + hash02dup2, hash02dup1 != hash02dup2); - } - /** - * Test instant equal to a given instant. - * From the series: instant {equal, before, after} instant - * @throws AccumuloSecurityException - * @throws AccumuloException - * @throws TableNotFoundException - */ - @Test - public void testQueryInstantEqualsInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - int expectedStoreCount = 5 * 2; // two entries for intervals - - // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time. - int searchForSeconds = 5; - int expectedResultCount = 1; - for (int s = 0; s <= searchForSeconds + 3; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - expectedStoreCount+=4; //4 entries per statement. - } - tIndexer.flush(); - - int rowsStoredActual = printTables("junit testing: testQueryInstantEqualsInstant 0 to 8 seconds and 5 intervals stored. expectedStoreCount="+expectedStoreCount, null /*System.out*/, null); - Assert.assertEquals("Should find count of rows.", expectedStoreCount, rowsStoredActual); - - CloseableIteration iter; - iter = tIndexer.queryInstantEqualsInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS); // <== logic here - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[searchForSeconds]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - - } - - /** - * Test instant after a given instant. 
- * From the series: instant {equal, before, after} instant - * @throws AccumuloSecurityException - * @throws AccumuloException - * @throws TableNotFoundException - */ - @Test - public void testQueryInstantAfterInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time. - int searchForSeconds = 4; - int expectedResultCount = 9; - for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[searchForSeconds + count + 1]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - /** - * Test instant before a given instant. - * From the series: instant {equal, before, after} instant - */ - @Test - public void testQueryInstantBeforeInstant() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time. - int searchForSeconds = 4; - int expectedResultCount = 4; - for (int s = 0; s <= searchForSeconds + 15; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - - iter = tIndexer.queryInstantBeforeInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[count]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - - /** - * Test instant before given interval. - * From the series: Instance {before, after, inside} given Interval - */ - @Test - public void testQueryInstantBeforeInterval() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. 
- tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time. - TemporalInterval searchForSeconds = tvB02_E31; - int expectedResultCount = 2; // 00 and 01 seconds. - for (int s = 0; s <= 40; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - iter = tIndexer.queryInstantBeforeInterval(searchForSeconds, EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[count]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - - /** - * Test instant after given interval. - * Instance {before, after, inside} given Interval - */ - @Test - public void testQueryInstantAfterInterval() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time. - TemporalInterval searchAfterInterval = tvB02_E31; // from 2 to 31 seconds - int endingSeconds = 31; - int expectedResultCount = 9; // 32,33,...,40 seconds. - for (int s = 0; s <= endingSeconds + expectedResultCount; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - iter = tIndexer.queryInstantAfterInterval(searchAfterInterval, EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[count + endingSeconds + 1]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - - /** - * Test instant inside given interval. - * Instance {before, after, inside} given Interval - */ - @Test - public void testQueryInstantInsideInterval() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time. - TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds - int beginningSeconds = 2; // <== logic here, and next few lines. 
- int endingSeconds = 31; - int expectedResultCount = endingSeconds - beginningSeconds - 1; // 3,4,...,30 seconds. - for (int s = 0; s <= 40; s++) { - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - iter = tIndexer.queryInstantInsideInterval(searchInsideInterval, EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[count + beginningSeconds + 1]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - /** - * Test instant is the Beginning of the given interval. - * from the series: Instance {hasBeginning, hasEnd} Interval - */ - @Test - public void testQueryInstantHasBeginningInterval() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time. - TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds - int searchSeconds = 2; // <== logic here, and next few lines. - int expectedResultCount = 1; // 2 seconds. - for (int s = 0; s <= 10; s++) { - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration iter; - iter = tIndexer.queryInstantHasBeginningInterval(searchInsideInterval, EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[searchSeconds]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - /** - * Test instant is the end of the given interval. - * from the series: Instance {hasBeginning, hasEnd} Interval - */ - @Test - public void testQueryInstantHasEndInterval() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time. - TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds - int searchSeconds = 31; // <== logic here, and next few lines. - int expectedResultCount = 1; // 31 seconds. 
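        // A minimal sketch (not part of the original test): hasEnd is an exact
        // boundary match, so of the 41 series instants stored below (seconds 0..40)
        // only the one equal to the interval's end (31 seconds) can be returned.
        int sketchMatches = 0;
        for (int sketchS = 0; sketchS <= 40; sketchS++) {
            if (sketchS == 31) {            // equals tvB02_E31's ending instant
                sketchMatches++;
            }
        }
        // sketchMatches == 1, mirroring expectedResultCount above.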
- for (int s = 0; s <= 40; s++) { - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - tIndexer.flush(); - CloseableIteration<Statement, QueryEvaluationException> iter; - iter = tIndexer.queryInstantHasEndInterval(searchInsideInterval, EMPTY_CONSTRAINTS); - int count = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - Statement nextExpectedStatement = seriesSpo[searchSeconds]; // <== logic here - assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count++; - } - Assert.assertEquals("Should find count of rows.", expectedResultCount, count); - } - - /** - * Test method for - * {@link mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer#queryIntervalEquals(TemporalInterval, StatementContraints)} - * . - * @throws IOException - * @throws QueryEvaluationException - * - */ - @Test - public void testQueryIntervalEquals() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - tIndexer.storeStatement(convertStatement(seriesSpo[4])); // instant at 4 seconds - tIndexer.flush(); - - CloseableIteration<Statement, QueryEvaluationException> iter; - iter = tIndexer.queryIntervalEquals(tvB02_E40, EMPTY_CONSTRAINTS); - // Should be found exactly once: - Assert.assertTrue("queryIntervalEquals: spo_B02_E40 should be found, but actually returned empty results. spo_B02_E40=" + spo_B02_E40, iter.hasNext()); - Assert.assertTrue("queryIntervalEquals: spo_B02_E40 should be found, but does not match.", spo_B02_E40.equals(iter.next())); - Assert.assertFalse("queryIntervalEquals: Find no more than one, but actually has more.", iter.hasNext()); - } - - /** - * Test interval before a given interval, for method: - * {@link AccumuloTemporalIndexer#queryIntervalBefore(TemporalInterval, StatementContraints)}. - * - * @throws IOException - * @throws QueryEvaluationException - */ - @Test - public void testQueryIntervalBefore() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - tIndexer.storeStatement(convertStatement(spo_B00_E01)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - // instants should be ignored. - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instant at 1 second - tIndexer.storeStatement(convertStatement(seriesSpo[2])); - tIndexer.storeStatement(convertStatement(seriesSpo[31])); - tIndexer.flush(); - - CloseableIteration<Statement, QueryEvaluationException> iter; - iter = tIndexer.queryIntervalBefore(tvB02_E31, EMPTY_CONSTRAINTS); - // Should be found exactly once: - Assert.assertTrue("spo_B00_E01 should be found, but actually returned empty results. spo_B00_E01=" + spo_B00_E01, iter.hasNext()); - Assert.assertTrue("spo_B00_E01 should be found, but found another.", spo_B00_E01.equals(iter.next())); - Assert.assertFalse("Find no more than one, but actually has more.", iter.hasNext()); - } - - /** - * Interval is after the given interval. Find interval beginnings after the endings of the given interval. - * {@link AccumuloTemporalIndexer#queryIntervalAfter(TemporalInterval, StatementContraints)}.
- * - * @throws IOException - * @throws QueryEvaluationException - */ - @Test - public void testQueryIntervalAfter() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - tIndexer.storeStatement(convertStatement(spo_B00_E01)); - tIndexer.storeStatement(convertStatement(spo_B02_E29)); //<- after this one. - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B29_E30)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - // instants should be ignored. - tIndexer.storeStatement(convertStatement(spo_B02)); - tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds - tIndexer.storeStatement(convertStatement(seriesSpo[2])); - tIndexer.storeStatement(convertStatement(seriesSpo[31])); - tIndexer.flush(); - - CloseableIteration iter; - iter = tIndexer.queryIntervalAfter(tvB02_E29, EMPTY_CONSTRAINTS); - // Should be found twice: - Assert.assertTrue("spo_B30_E32 should be found, but actually returned empty results. spo_B30_E32=" + spo_B30_E32, iter.hasNext()); - Statement s = iter.next(); - Assert.assertTrue("spo_B30_E32 should be found, but found another. spo_B30_E32="+spo_B30_E32+", but found="+s, spo_B30_E32.equals(s)); - Assert.assertFalse("Find no more than one, but actually has more.", iter.hasNext()); - - } - - /** - * Test instant after a given instant WITH two different predicates as constraints. - */ - @Test - public void testQueryWithMultiplePredicates() throws IOException, QueryEvaluationException { - // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds - // these should not match as they are not instances. - tIndexer.storeStatement(convertStatement(spo_B03_E20)); - tIndexer.storeStatement(convertStatement(spo_B02_E30)); - tIndexer.storeStatement(convertStatement(spo_B02_E40)); - tIndexer.storeStatement(convertStatement(spo_B02_E31)); - tIndexer.storeStatement(convertStatement(spo_B30_E32)); - - // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time. - int searchForSeconds = 4; - int expectedResultCount = 9; - for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here - tIndexer.storeStatement(convertStatement(seriesSpo[s])); - } - ValueFactory vf = new ValueFactoryImpl(); - URI pred3_CIRCA_ = vf.createURI(URI_PROPERTY_CIRCA); // this one to ignore. - URI pred2_eventTime = vf.createURI(URI_PROPERTY_EVENT_TIME); - URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME); - - // add the predicate = EventTime ; Store in an array for verification. - Statement[] SeriesTs_EventTime = new Statement[expectedResultCount+1]; - for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here - Statement statement = new StatementImpl(vf.createURI("foo:EventTimeSubj0" + s), pred2_eventTime, vf.createLiteral(seriesTs[s].getAsReadable())); - tIndexer.storeStatement(convertStatement(statement)); - if (s>searchForSeconds) - SeriesTs_EventTime[s - searchForSeconds -1 ] = statement; - } - // add the predicate = CIRCA ; to be ignored because it is not in the constraints. 
- for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here - Statement statement = new StatementImpl(vf.createURI("foo:CircaEventSubj0" + s), pred3_CIRCA_, vf.createLiteral(seriesTs[s].getAsReadable())); - tIndexer.storeStatement(convertStatement(statement)); - } - tIndexer.flush(); - CloseableIteration iter; - StatementContraints constraints = new StatementContraints(); - constraints.setPredicates(new HashSet(Arrays.asList( pred2_eventTime, pred1_atTime ))); - - iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], constraints); // EMPTY_CONSTRAINTS);// - int count_AtTime = 0; - int count_EventTime = 0; - while (iter.hasNext()) { - Statement s = iter.next(); - //System.out.println("testQueryWithMultiplePredicates result="+s); - Statement nextExpectedStatement = seriesSpo[searchForSeconds + count_AtTime + 1]; // <== logic here - if (s.getPredicate().equals(pred1_atTime)) { - assertTrue("Should match atTime: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s)); - count_AtTime++; - } - else if (s.getPredicate().equals(pred2_eventTime)) { - assertTrue("Should match eventTime: " + SeriesTs_EventTime[count_EventTime] + " == " + s, SeriesTs_EventTime[count_EventTime].equals(s)); - count_EventTime++; - } else { - assertTrue("This predicate should not be returned: "+s, false); - } - - } - - Assert.assertEquals("Should find count of atTime rows.", expectedResultCount, count_AtTime); - Assert.assertEquals("Should find count of eventTime rows.", expectedResultCount, count_EventTime); - } - - - /** - * Test method for {@link AccumuloTemporalIndexer#getIndexablePredicates()} . - * - * @throws TableExistsException - * @throws TableNotFoundException - * @throws AccumuloSecurityException - * @throws AccumuloException - * @throws IOException - */ - @Test - public void testGetIndexablePredicates() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, IOException { - Set p = tIndexer.getIndexablePredicates(); - Assert.assertEquals("number of predicates returned:", 3, p.size()); - } - - /** - * Count all the entries in the temporal index table, return the count. - * Uses printTables for reliability. - * - */ - public int countAllRowsInTable() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, NoSuchAlgorithmException { - return printTables("Counting rows.", null, null); - } - - /** - * Print and gather statistics on the entire index table. - * - * @param description - * Printed to the console to find the test case. - * @param out - * null or System.out or other output to send a listing. - * @param statistics - * Hashes, sums, and counts for assertions. - * @return Count of entries in the index table. 
- */ - public int printTables(String description, PrintStream out, Map statistics) - throws TableNotFoundException, AccumuloException, AccumuloSecurityException - { - if (out == null) { - out = new PrintStream(new NullOutputStream()); - } - out.println("-- start printTables() -- " + description); - String FORMAT = "%-20s %-20s %-40s %-40s\n"; - int rowsPrinted = 0; - long keyHasher = 0; - long valueHasher = 0; - out.println("Reading : " + this.uniquePerTestTemporalIndexTableName); - out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--"); - - Scanner s = ConfigUtils.getConnector(conf).createScanner(this.uniquePerTestTemporalIndexTableName, Constants.NO_AUTHS); - for (Entry entry : s) { - rowsPrinted++; - Key k = entry.getKey(); - out.format(FORMAT, toHumanString(k.getRow()), - toHumanString(k.getColumnFamily()), - toHumanString(k.getColumnQualifier()), - toHumanString(entry.getValue())); - keyHasher = hasher(keyHasher, (StringUtils.getBytesUtf8(entry.getKey().toStringNoTime()))); - valueHasher = hasher(valueHasher, (entry.getValue().get())); - } - out.println(); - // } - - if (statistics != null) { - statistics.put(STAT_COUNT, (long) rowsPrinted); - statistics.put(STAT_KEYHASH, keyHasher); - statistics.put(STAT_VALUEHASH, valueHasher); - } - - return rowsPrinted; - - } - - /** - * Order independent hashcode. - * Read more: http://stackoverflow.com/questions/18021643/hashing-a-set-of-integers-in-an-order-independent-way - * - * @param hashcode - * @param list - * @return - */ - private static long hasher(long hashcode, byte[] list) { - long sum = 0; - for (byte val : list) { - sum += 1L + val; - } - hashcode ^= sum; - return hashcode; - } - - /** - * convert a non-utf8 byte[] and text and value to string and show unprintable bytes as {xx} where x is hex. - * @param value - * @return Human readable representation. - */ - static String toHumanString(Value value) { - return toHumanString(value==null?null:value.get()); - } - static String toHumanString(Text text) { - return toHumanString(text==null?null:text.copyBytes()); - } - static String toHumanString(byte[] bytes) { - if (bytes==null) - return "{null}"; - StringBuilder sb = new StringBuilder(); - for (byte b : bytes) { - if ((b > 0x7e) || (b < 32)) { - sb.append("{"); - sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones. - sb.append("}"); - } else if (b == '{'||b == '}') { // Escape the literal braces. - sb.append("{"); - sb.append((char)b); - sb.append("}"); - } else - sb.append((char)b); - } - return sb.toString(); - } -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java deleted file mode 100644 index 6363372c6..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java +++ /dev/null @@ -1,96 +0,0 @@ -package mvm.rya.indexing.accumulo.temporal; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.TimeZone; - -import mvm.rya.indexing.TemporalInstant; - -import org.apache.commons.codec.binary.StringUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; -import org.junit.Assert; -import org.junit.Test; - -public class TemporalInstantTest { - @Test - public void constructorTest() throws Exception { - TemporalInstant instant = new TemporalInstantRfc3339(2014, 12, 30, // - 12, 59, 59); - // YYYY-MM-DDThh:mm:ssZ - String stringTestDate01 = "2014-12-30T12:59:59Z"; - Date dateTestDate01 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX") - .parse(stringTestDate01); - Assert.assertEquals(stringTestDate01, instant.getAsKeyString()); - Assert.assertArrayEquals(StringUtils.getBytesUtf8(instant.getAsKeyString()), instant.getAsKeyBytes()); - Assert.assertTrue("Key must be normalized to time zone Zulu",instant.getAsKeyString().endsWith("Z")); - // show the local time for us. - // Warning: if this test is run in the London or Zulu time zone, the result will be the same as above, ending with Z. - // TimeZone.setDefault(TimeZone.getTimeZone("UTC")); // this does not affect the library, don't use. - String stringLocalTestDate01 = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ssXXX").format(dateTestDate01); - // for ET, will be: "2014-12-30T07:59:59-05:00" - //instant.getAsDateTime().withZone(null); -// System.out.println("===System.getProperty(user.timezone)="+System.getProperty("user.timezone")); //=ET -// System.out.println("===============TimeZone.getDefault()="+TimeZone.getDefault()); //=ET -// System.out.println("===========DateTimeZone.getDefault()="+DateTimeZone.getDefault()); //=UTC (wrong!) - // the timezone default gets set to UTC by some prior test, fix it here. - DateTimeZone newTimeZone = null; - try { - String id = System.getProperty("user.timezone"); - if (id != null) { - newTimeZone = DateTimeZone.forID(id); - } - } catch (RuntimeException ex) { - // ignored - } - if (newTimeZone == null) { - newTimeZone = DateTimeZone.forTimeZone(TimeZone.getDefault()); - } - DateTimeZone.setDefault(newTimeZone); - // null timezone means use the default: - Assert.assertEquals("Joda time library (actual) should use same local timezone as Java date (expected).", stringLocalTestDate01, instant.getAsReadable(null)); - } - @Test - public void zoneTestTest() throws Exception { - final String ZONETestDateInBRST = "2014-12-31T23:59:59-02:00"; // arbitrary zone, BRST=Brazil, better if not local.
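        // A minimal sketch (not part of the original test) of the normalization rule
        // the assertions below exercise, spelled out with the same Joda-Time calls the
        // test itself uses: any zoned RFC 3339 string has one canonical Zulu form,
        // which is what the index key stores.
        String sketchZulu = DateTime.parse("2014-12-31T23:59:59-02:00")
                .withZone(DateTimeZone.UTC)
                .toString(ISODateTimeFormat.dateTimeNoMillis());
        // sketchZulu is "2015-01-01T01:59:59Z" -- the -02:00 wall time crosses
        // midnight into the next year when shifted to UTC.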
- final String ZONETestDateInZulu = "2015-01-01T01:59:59Z"; - final String ZONETestDateInET = "2014-12-31T20:59:59-05:00"; - TemporalInstant instant = new TemporalInstantRfc3339(DateTime.parse(ZONETestDateInBRST)); - - Assert.assertEquals("Test our test Zulu, ET strings.", ZONETestDateInET, DateTime.parse(ZONETestDateInZulu).withZone(DateTimeZone.forID("-05:00")).toString(ISODateTimeFormat.dateTimeNoMillis())); - Assert.assertEquals("Test our test BRST,Zulu strings.", ZONETestDateInZulu, DateTime.parse(ZONETestDateInBRST).withZone(DateTimeZone.UTC).toString(ISODateTimeFormat.dateTimeNoMillis())); - - Assert.assertTrue("Key must be normalized to time zone Zulu: "+instant.getAsKeyString(), instant.getAsKeyString().endsWith("Z")); - Assert.assertEquals("Key must be normalized from BRST -02:00", ZONETestDateInZulu, instant.getAsKeyString()); - Assert.assertArrayEquals(StringUtils.getBytesUtf8(instant.getAsKeyString()), instant.getAsKeyBytes()); - - Assert.assertTrue( "Ignore original time zone.", ! ZONETestDateInBRST.equals( instant.getAsReadable(DateTimeZone.forID("-07:00")))); - Assert.assertEquals( "Use original time zone.", ZONETestDateInBRST, instant.getAsDateTime().toString(TemporalInstantRfc3339.FORMATTER)); - Assert.assertEquals( "Time at specified time zone.", ZONETestDateInET, instant.getAsReadable(DateTimeZone.forID("-05:00"))); - - instant = new TemporalInstantRfc3339(DateTime.parse(ZONETestDateInZulu)); - Assert.assertEquals("expect a time with specified time zone.", ZONETestDateInET, instant.getAsReadable(DateTimeZone.forID("-05:00"))); - } - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java deleted file mode 100644 index 6213826cc..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java +++ /dev/null @@ -1,178 +0,0 @@ -package mvm.rya.indexing.accumulo.temporal; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Arrays; - -import mvm.rya.indexing.TemporalInstant; -import mvm.rya.indexing.TemporalInterval; - -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Test; - -public class TemporalIntervalTest { - @Test - public void constructorTest() throws Exception { - TemporalInterval ti = new TemporalInterval( // - new TemporalInstantRfc3339(2014, 12, 30, 12, 59, 59), // - new TemporalInstantRfc3339(2014, 12, 30, 13, 00, 00)); // - Assert.assertNotNull(ti.getAsKeyBeginning()); - Assert.assertNotNull(ti.getHasEnd()); - } - @Test - public void constructorBadArgTest() throws Exception { - // the end precedes the beginning: - try { - TemporalInterval ti = new TemporalInterval( // - new TemporalInstantRfc3339(2017, 12, 30, 12, 59, 59), // - new TemporalInstantRfc3339( 820, 12, 30, 12, 59, 59)); // the invention of algebra. - Assert.assertFalse("Constructor should throw an error if the beginning is after the end, but no error for interval:"+ti, true); - }catch (IllegalArgumentException e) { - // expected to catch this error. - } - } - - @Test - public void relationsTest() throws Exception { - - TemporalInterval ti01 = new TemporalInterval( - new TemporalInstantRfc3339(2015, 12, 30, 12, 59, 59), // - new TemporalInstantRfc3339(2016, 12, 30, 13, 00, 00)); // - - TemporalInterval ti02 = new TemporalInterval( - new TemporalInstantRfc3339(2015, 12, 30, 12, 59, 59), // - new TemporalInstantRfc3339(2016, 12, 30, 13, 00, 00)); // - - Assert.assertTrue("same constructor parameters, should be equal.", - ti01.equals(ti02)); - Assert.assertTrue( - "same constructor parameters, should compare 0 equal.", - 0 == ti01.compareTo(ti02)); - - } - - @Test - public void keyBeginTest() throws Exception { - // 58 seconds, earlier - TemporalInterval beginTI01 = new TemporalInterval( - new TemporalInstantRfc3339(2015, 12, 30, 12, 59, 58), // - new TemporalInstantRfc3339(2016, 12, 30, 13, 00, 00)); // - // 59 seconds, later - TemporalInterval beginTI02 = new TemporalInterval( - new TemporalInstantRfc3339(2015, 12, 30, 12, 59, 59), // - new TemporalInstantRfc3339(2016, 12, 30, 13, 00, 00)); // - - String key01b = Arrays.toString(beginTI01.getAsKeyBeginning()); - String key02b = Arrays.toString(beginTI02.getAsKeyBeginning()); - Assert.assertEquals("key02 is later so comparesTo = 1.", 1, key02b.compareTo(key01b)); - Assert.assertEquals("key01 is first so comparesTo = -1", -1, key01b.compareTo(key02b)); - - } - // These are shared for end test and compareTo tests. 
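    // A minimal sketch (not part of the original class) of the ordering the fields
    // below are built to exercise: compare beginning instants first and ending
    // instants second, which is what CompareToTest asserts. Illustrative comparator
    // over plain seconds, not the class's own implementation:
    static int compareIntervalSketch(int beginA, int endA, int beginB, int endB) {
        int byBeginning = Integer.compare(beginA, beginB);      // B02 sorts before B03
        if (byBeginning != 0) {
            return byBeginning;
        }
        return Integer.compare(endA, endB);                     // then E30 before E31
    }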
- // tiB03_E20 read as: Begins 3 seconds, ends at 20 seconds - final TemporalInterval tiB03_E20 = new TemporalInterval(// - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 03), // - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 20)); - // 30 seconds, Begins earlier, ends later - final TemporalInterval tiB02_E30 = new TemporalInterval(// - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 02), // - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 30)); - // 30 seconds, same as above - final TemporalInterval tiB02_E30Dup = new TemporalInterval(// - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 02), // - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 30)); - // 30 seconds, same as above, but ends later - final TemporalInterval tiB02_E31 = new TemporalInterval(// - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 02), // - new TemporalInstantRfc3339(2015, 12, 30, 12, 00, 31)); - - @Test - public void CompareToTest() throws Exception { - - Assert.assertEquals("interval tiB03_E20.compareTo(tiB02_E30), B03 starts later(greater) so comparesTo = 1.", 1, tiB03_E20.compareTo(tiB02_E30)); - Assert.assertEquals("interval tiB02_E30.compareTo(tiB03_E20), B02 starts first(lesser) so comparesTo = -1", -1, tiB02_E30.compareTo(tiB03_E20)); - Assert.assertEquals("interval tiB02_E30.compareTo(tiB02_E31), E30 ends first so comparesTo = -1", -1, tiB02_E30.compareTo(tiB02_E31)); - Assert.assertEquals("interval tiB02_E30.compareTo(tiB02_E30Dup) same so comparesTo = 0", 0, tiB02_E30.compareTo(tiB02_E30Dup)); - } - @Test - public void EqualsTest() throws Exception { - Object notATemporalInterval = "Iamastring."; - Assert.assertFalse("interval tiB02_E30.equals(tiB02_E31) differ so equals() is false.", tiB02_E30.equals(notATemporalInterval)); - Assert.assertFalse("interval tiB02_E30.equals(tiB02_E31) differ so equals() is false.", tiB02_E30.equals(tiB02_E31)); - Assert.assertTrue ("interval tiB02_E30.equals(tiB02_E30Dup) same so equals() is true.", tiB02_E30.equals(tiB02_E30Dup)); - } - @Test - public void keyEndTest() throws Exception { - String keyB03_E20 = new String( tiB03_E20.getAsKeyEnd(), "US-ASCII"); - String keyB02_E30 = new String(tiB02_E30.getAsKeyEnd(), "US-ASCII"); - String keyB02_E30Dup = new String(tiB02_E30Dup.getAsKeyEnd(), "US-ASCII"); - - Assert.assertEquals("End keyB02_E30.compareTo(keyB03_E20), E30 is later = 1. 
key="+keyB02_E30, 1, keyB02_E30.compareTo(keyB03_E20)); - Assert.assertEquals("End keyB03_E20.compareTo(keyB02_E30), E20 is first = -1", -1, keyB03_E20.compareTo(keyB02_E30)); - Assert.assertEquals("End keyB02_E30.compareTo(keyB02_E30Dup) same so comparesTo = 0", 0, keyB02_E30.compareTo(keyB02_E30Dup)); - } - - - - @Test - public void infinitePastFutureAlwaysTest() throws Exception { - final TemporalInstant TestDateString = new TemporalInstantRfc3339(new DateTime("2015-01-01T01:59:59Z")); - TemporalInterval tvFuture = new TemporalInterval(TestDateString,TemporalInstantRfc3339.getMaximumInstance()); - TemporalInterval tvPast = new TemporalInterval(TemporalInstantRfc3339.getMinimumInstance(), TestDateString); - TemporalInterval tvAlways = new TemporalInterval(TemporalInstantRfc3339.getMinimumInstance(), TemporalInstantRfc3339.getMaximumInstance()); - Assert.assertTrue("The future is greater (starts after) than the past for compareTo().", tvFuture.compareTo(tvPast) > 0); - Assert.assertTrue("The future is greater (starts after) than always for compareTo().", tvFuture.compareTo(tvAlways) > 0); - Assert.assertTrue("The past is less (starts same, ends earlier) than always for compareTo().", tvFuture.compareTo(tvPast) > 0); - - } - @Test - public void hashTest() throws Exception { - // Use MAX to see how it handles overflowing values. Should silently go negative. - int hashcode01Same = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE / 2)), new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE)))).hashCode(); - int hashcode02Same = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE / 2)), new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE)))).hashCode(); - int hashcode03Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE / 2)), new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE)))).hashCode(); - int hashcode04Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(Integer.MIN_VALUE )), new TemporalInstantRfc3339(new DateTime(Integer.MIN_VALUE)))).hashCode(); - int hashcode05Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE )), new TemporalInstantRfc3339(new DateTime(Integer.MAX_VALUE)))).hashCode(); - int hashcode06Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(0)), new TemporalInstantRfc3339(new DateTime( 0)))).hashCode(); - int hashcode07Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(1000)), new TemporalInstantRfc3339(new DateTime( 1000)))).hashCode(); - int hashcode08Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(0)), new TemporalInstantRfc3339(new DateTime( 1000)))).hashCode(); - int hashcode09Diff = (new TemporalInterval((TemporalInstantRfc3339.getMinimumInstance()),(TemporalInstantRfc3339.getMaximumInstance()) )).hashCode(); - int hashcode10Diff = (new TemporalInterval(new TemporalInstantRfc3339(new DateTime(0)) ,(TemporalInstantRfc3339.getMaximumInstance()) )).hashCode(); - Assert.assertEquals("Same input should produce same hashcode. (always!)", hashcode01Same , hashcode02Same); - - Assert.assertTrue("Different small input should produce different hashcode. (usually!) hashcodes:" - +hashcode03Diff+" "+hashcode04Diff+" "+hashcode03Diff+" "+hashcode05Diff, - hashcode03Diff != hashcode04Diff && hashcode03Diff != hashcode05Diff); - - Assert.assertTrue("Different large input should produce different hashcode. (usually!) 
hashcodes:" - +hashcode06Diff +" "+ hashcode07Diff +" "+ hashcode06Diff +" "+ hashcode08Diff - +" key for date 0= "+(new TemporalInstantRfc3339(new DateTime(0))).getAsKeyString() - +" key for date 1000= "+(new TemporalInstantRfc3339(new DateTime(1000))).getAsKeyString(), - hashcode06Diff != hashcode07Diff && hashcode06Diff != hashcode08Diff); - Assert.assertTrue("Different max and min input should produce different hashcode. (usually!) hashcodes:" - +hashcode09Diff +" != "+ hashcode10Diff - +"fyi: key for date max= "+(TemporalInstantRfc3339.getMaximumInstance()).getAsKeyString() - + " key for date min= "+(TemporalInstantRfc3339.getMinimumInstance()).getAsKeyString(), - hashcode09Diff != hashcode10Diff ); - - } -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java deleted file mode 100644 index 98acf3916..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java +++ /dev/null @@ -1,831 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.Map.Entry; - -import junit.framework.Assert; -import mvm.rya.indexing.IndexPlanValidator.IndexPlanValidator; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; -import mvm.rya.indexing.external.tupleSet.ExternalProcessorTest.ExternalTupleVstor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.URI; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; -import org.openrdf.sail.SailException; -import org.openrdf.sail.memory.MemoryStore; - -import com.beust.jcommander.internal.Sets; -import com.google.common.collect.Lists; - -public class AccumuloConstantIndexSetTest { - - - private SailRepositoryConnection conn; - private Connector accCon; - String tablename = "table"; - Sail s; - URI obj, obj2, subclass, subclass2, talksTo; - - @Before - public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException { - - s = new MemoryStore(); - SailRepository repo = new SailRepository(s); - repo.initialize(); - conn = repo.getConnection(); - - URI sub = new URIImpl("uri:entity"); - subclass = new URIImpl("uri:class"); - obj = new URIImpl("uri:obj"); - talksTo = new URIImpl("uri:talksTo"); - - conn.add(sub, RDF.TYPE, subclass); - conn.add(sub, RDFS.LABEL, new LiteralImpl("label")); - conn.add(sub, talksTo, obj); - - URI sub2 = new URIImpl("uri:entity2"); - subclass2 = new URIImpl("uri:class2"); - obj2 = new URIImpl("uri:obj2"); - - conn.add(sub2, RDF.TYPE, subclass2); - conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub2, talksTo, obj2); - - accCon = new MockInstance().getConnector("root", "".getBytes()); - accCon.tableOperations().create(tablename); - - } - - - - - @Test - public void testEvaluateTwoIndexVarInstantiate1() { - - URI superclass = new 
URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - try { - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e5) { - // TODO Auto-generated catch block - e5.printStackTrace(); - } - - try { - if (accCon.tableOperations().exists("table2")) { - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (TableExistsException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - // TODO Auto-generated catch block - e3.printStackTrace(); - } - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?duck " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?duck "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?c ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?c ?l ?f ?o " // - + "{" // - + " a ?c . "// - + " ?l. "// - + " ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (SailException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MutationsRejectedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (RepositoryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - index.add(ais1); - index.add(ais2); - - ExternalProcessor processor = new ExternalProcessor(index); - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch 
(RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - } - - - - - - @Test - public void testEvaluateThreeIndexVarInstantiate() { - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - URI sub = new URIImpl("uri:entity"); - subclass = new URIImpl("uri:class"); - obj = new URIImpl("uri:obj"); - talksTo = new URIImpl("uri:talksTo"); - - URI howlsAt = new URIImpl("uri:howlsAt"); - URI subType = new URIImpl("uri:subType"); - - - try { - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub, howlsAt, superclass); - conn.add(superclass, subType, obj); - } catch (RepositoryException e5) { - // TODO Auto-generated catch block - e5.printStackTrace(); - } - - try { - if (accCon.tableOperations().exists("table2")) { - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - - if (accCon.tableOperations().exists("table3")) { - accCon.tableOperations().delete("table3"); - } - accCon.tableOperations().create("table3"); - } catch (AccumuloException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (TableExistsException e4) { - // TODO Auto-generated catch block - e4.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - // TODO Auto-generated catch block - e3.printStackTrace(); - } - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?duck " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?duck "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?c ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String indexSparqlString3 = ""// - + "SELECT ?wolf ?sheep ?chicken " // - + "{" // - + " ?wolf ?sheep . "// - + " ?sheep ?chicken. "// - + "}";// - - String queryString = ""// - + "SELECT ?c ?l ?f ?o " // - + "{" // - + " a ?c . "// - + " ?l. "// - + " ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + " ?f. "// - + " ?f . 
"// - + "}";// - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - AccumuloIndexSet ais3 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - ais3 = new AccumuloIndexSet(indexSparqlString3, conn, accCon, "table3"); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (SailException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MutationsRejectedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - index.add(ais1); - index.add(ais3); - index.add(ais2); - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (RepositoryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - - - - -// Scanner s = null; -// try { -// s = accCon.createScanner("table3", new Authorizations()); -// } catch (TableNotFoundException e) { -// // TODO Auto-generated catch block -// e.printStackTrace(); -// } -// s.setRange(new Range()); -// Iterator> i = s.iterator(); -// -// while (i.hasNext()) { -// Entry entry = i.next(); -// Key k = entry.getKey(); -// System.out.println(k); -// -// } - - - - - - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - - - - - - } - - - - - - - @Test - public void testEvaluateFilterInstantiate() { - - URI e1 = new URIImpl("uri:e1"); - URI e2 = new URIImpl("uri:e2"); - URI e3 = new URIImpl("uri:e3"); - URI f1 = new URIImpl("uri:f1"); - URI f2 = new URIImpl("uri:f2"); - URI f3 = new URIImpl("uri:f3"); - URI g1 = new URIImpl("uri:g1"); - URI g2 = new URIImpl("uri:g2"); - URI g3 = new URIImpl("uri:g3"); - - - - try { - conn.add(e1, talksTo, f1); - conn.add(f1, talksTo, g1); - conn.add(g1, talksTo, e1); - conn.add(e2, talksTo, f2); - conn.add(f2, talksTo, g2); - conn.add(g2, talksTo, e2); - conn.add(e3, talksTo, f3); - conn.add(f3, talksTo, g3); - conn.add(g3, talksTo, 
e3); - } catch (RepositoryException e5) { - // TODO Auto-generated catch block - e5.printStackTrace(); - } - - - String queryString = ""// - + "SELECT ?x ?y ?z " // - + "{" // - + "Filter(?x = ) . " // - + " ?x ?y. " // - + " ?y ?z. " // - + " ?z . " // - + "}";// - - - - String indexSparqlString = ""// - + "SELECT ?a ?b ?c ?d " // - + "{" // - + "Filter(?a = ?d) . " // - + " ?a ?b. " // - + " ?b ?c. " // - + " ?c ?d. " // - + "}";// - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (SailException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MutationsRejectedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - index.add(ais1); - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - - - - - - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - - - - - - } - - - - - @Test - public void testEvaluateCompoundFilterInstantiate() { - - URI e1 = new URIImpl("uri:e1"); - URI f1 = new URIImpl("uri:f1"); - - - try { - conn.add(e1, talksTo, e1); - conn.add(e1, talksTo, f1); - conn.add(f1, talksTo, e1); - - } catch (RepositoryException e5) { - // TODO Auto-generated catch block - e5.printStackTrace(); - } - - - String queryString = ""// - + "SELECT ?x ?y ?z " // - + "{" // - + "Filter(?x = && ?y = ) . " // - + " ?x ?y. " // - + " ?y ?z. " // - + " ?z . " // - + "}";// - - - - String indexSparqlString = ""// - + "SELECT ?a ?b ?c ?d " // - + "{" // - + "Filter(?a = ?d && ?b = ?d) . " // - + " ?a ?b. " // - + " ?b ?c. " // - + " ?c ?d. 
" // - + "}";// - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (SailException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MutationsRejectedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TableNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - index.add(ais1); - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - -// System.out.println("Counts are " + crh1.getCount() + " and " + crh2.getCount()); -// -// -// Scanner s = null; -// try { -// s = accCon.createScanner(tablename, new Authorizations()); -// } catch (TableNotFoundException e) { -// // TODO Auto-generated catch block -// e.printStackTrace(); -// } -// s.setRange(new Range()); -// Iterator> i = s.iterator(); -// -// while (i.hasNext()) { -// Entry entry = i.next(); -// Key k = entry.getKey(); -// System.out.println(k); -// -// } - - - Assert.assertEquals(2, crh1.getCount()); - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - - - - - } - - - - - - - - - - - - - - - - public static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - - public int getCount() { - return count; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - count++; - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO 
Auto-generated method stub - - } - } - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java deleted file mode 100644 index c8ea57d39..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java +++ /dev/null @@ -1,4330 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.List; -import java.util.Set; - -import junit.framework.Assert; -import mvm.rya.indexing.IndexPlanValidator.IndexPlanValidator; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; -import mvm.rya.indexing.external.tupleSet.ExternalProcessorTest.ExternalTupleVstor; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.URI; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; -import org.openrdf.sail.SailException; -import org.openrdf.sail.memory.MemoryStore; - -import com.beust.jcommander.internal.Sets; -import com.google.common.collect.Lists; - -public class AccumuloIndexSetTest { - - private SailRepositoryConnection conn; - private Connector accCon; - String tablename = "table"; - Sail s; - URI obj,obj2,subclass, subclass2, talksTo; - - - - - @Before - public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, AccumuloException, AccumuloSecurityException, 
TableExistsException { - - s = new MemoryStore(); - SailRepository repo = new SailRepository(s); - repo.initialize(); - conn = repo.getConnection(); - - URI sub = new URIImpl("uri:entity"); - subclass = new URIImpl("uri:class"); - obj = new URIImpl("uri:obj"); - talksTo = new URIImpl("uri:talksTo"); - - conn.add(sub, RDF.TYPE, subclass); - conn.add(sub, RDFS.LABEL, new LiteralImpl("label")); - conn.add(sub, talksTo, obj); - - URI sub2 = new URIImpl("uri:entity2"); - subclass2 = new URIImpl("uri:class2"); - obj2 = new URIImpl("uri:obj2"); - - conn.add(sub2, RDF.TYPE, subclass2); - conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub2, talksTo, obj2); - - accCon = new MockInstance().getConnector("root", "".getBytes()); - accCon.tableOperations().create(tablename); - - - - - } - - - - - - @Test - public void testEvaluateSingeIndex() { - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - CountingResultHandler crh = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais = null; - - try { - ais = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - -// Scanner scan = null; -// try { -// scan = accCon.createScanner(tablename, new Authorizations("auths")); -// } catch (TableNotFoundException e) { -// -// e.printStackTrace(); -// } - -// scan.setRange(new Range()); -// -// for (Map.Entry entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - - - - index.add(ais); - - Assert.assertEquals((double)crh.getCount(), ais.cardinality()); - - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + "}";// - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - CountingResultHandler crh2 = new CountingResultHandler(); - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - - } - - - - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder1() { - - - try { - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . 
"// - + " ?o ?l "// - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - -// Scanner scan = null; -// try { -// scan = accCon.createScanner(tablename, new Authorizations("auths")); -// } catch (TableNotFoundException e) { -// -// e.printStackTrace(); -// } - -// scan.setRange(new Range()); -// -// for (Map.Entry entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - - - - index.add(ais1); - index.add(ais2); - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(2, crh1.getCount()); - Assert.assertEquals(2, crh2.getCount()); - - - - - - } - - - - - - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder2() { - - - try { - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - 
e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - - index.add(ais1); - index.add(ais2); - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(2, crh1.getCount()); - Assert.assertEquals(2, crh2.getCount()); - - - - } - - - - - - @Test - public void 
testEvaluateTwoIndexTwoVarInvalidOrder() { - - - try { - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?e ?c ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - - - index.add(ais1); - index.add(ais2); - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch 
(RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(crh1.getCount(), crh2.getCount()); - - - - } - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder1() { - - - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - - try { - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?c ?l ?f ?o" // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - - index.add(ais1); - index.add(ais2); - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - 
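
Every `new AccumuloIndexSet(...)` call in this file is wrapped in the same five Java 6-style catch blocks that only print the stack trace, letting the test continue with a null entry in the index list. A minimal sketch of the same call sites under Java 7 multi-catch, using only classes this file already imports (the `IndexSets.register` helper name is hypothetical, not part of the patch):

    import java.util.List;

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.MutationsRejectedException;
    import org.apache.accumulo.core.client.TableNotFoundException;
    import org.openrdf.query.MalformedQueryException;
    import org.openrdf.query.QueryEvaluationException;
    import org.openrdf.repository.sail.SailRepositoryConnection;
    import org.openrdf.sail.SailException;

    import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
    import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;

    final class IndexSets {
        private IndexSets() {
        }

        // Builds the index table and registers it; any checked failure fails
        // the test immediately instead of leaving a null entry in the list.
        static AccumuloIndexSet register(List<ExternalTupleSet> index, String sparql,
                SailRepositoryConnection conn, Connector accCon, String table) {
            try {
                AccumuloIndexSet ais = new AccumuloIndexSet(sparql, conn, accCon, table);
                index.add(ais);
                return ais;
            } catch (MalformedQueryException | SailException | QueryEvaluationException
                    | MutationsRejectedException | TableNotFoundException e) {
                throw new RuntimeException("could not materialize index table " + table, e);
            }
        }
    }
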
try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(2, crh1.getCount()); - Assert.assertEquals(2, crh2.getCount()); - - - - } - - - - - - - - //@Test - public void testEvaluateTwoIndexThreeVarsDiffLabel() { - - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - - try { - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?owl " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?owl "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?e ?c ?l ?f ?o" // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); -// try { -// conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); -// conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); -// } catch (TupleQueryResultHandlerException e2) { -// -// e2.printStackTrace(); -// } catch (QueryEvaluationException e2) { -// -// e2.printStackTrace(); -// } catch (MalformedQueryException e2) { -// -// e2.printStackTrace(); -// } catch (RepositoryException e2) { -// -// e2.printStackTrace(); -// } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - -// -// Scanner scan = null; -// try { -// scan = accCon.createScanner(tablename, new Authorizations("auths")); -// } catch (TableNotFoundException e) { -// -// e.printStackTrace(); -// } -// -// scan.setRange(new Range()); -// -// for (Map.Entry entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - - - index.add(ais2); - index.add(ais1); - - -// Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); -// Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); -// -// -// crh1 = new CountingResultHandler(); -// crh2 = new CountingResultHandler(); -// -// -// try { -// conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); -// } catch (TupleQueryResultHandlerException e1) { -// -// e1.printStackTrace(); -// } catch (QueryEvaluationException e1) { -// -// e1.printStackTrace(); -// } catch (MalformedQueryException e1) { -// -// e1.printStackTrace(); -// } catch (RepositoryException e1) { -// -// e1.printStackTrace(); -// } - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - processor.process(pq.getTupleExpr()); - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - // Assert.assertEquals(2, crh1.getCount()); - Assert.assertEquals(2, crh2.getCount()); - - - - } - - - - - - - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder2() { - - - - URI superclass = new 
URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - - try { - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?c ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - -// -// try { -// conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); -// } catch (TupleQueryResultHandlerException e1) { -// -// e1.printStackTrace(); -// } catch (QueryEvaluationException e1) { -// -// e1.printStackTrace(); -// } catch (MalformedQueryException e1) { -// -// e1.printStackTrace(); -// } catch (RepositoryException e1) { -// -// e1.printStackTrace(); -// } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - 
Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - // Assert.assertEquals(2, crh1.getCount()); - Assert.assertEquals(2, crh2.getCount()); - - - - } - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder3ThreeBindingSet() { - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - - try { - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?l ?e ?c " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - -// Scanner scan1 = null; -// Scanner scan2 = null; -// try { -// scan1 = accCon.createScanner(tablename, new Authorizations("auths")); -// scan2 = accCon.createScanner("table2", new Authorizations("auths")); -// } catch (TableNotFoundException e) { -// -// e.printStackTrace(); -// } -// -// scan1.setRange(new Range()); -// -// for (Map.Entry entry : scan1) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } -// -// -// scan2.setRange(new Range()); -// -// for (Map.Entry entry : scan2) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - // new SPARQLResultsXMLWriter(System.out) - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch 
(MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(3, crh1.getCount()); - Assert.assertEquals(3, crh2.getCount()); - - - - } - - - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder5ThreeBindingSet() { - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - - try { - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?l ?c " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - // new SPARQLResultsXMLWriter(System.out) - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(3, crh1.getCount()); - Assert.assertEquals(3, crh2.getCount()); - - - - } - - - - - - - - - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder4ThreeBindingSet() { - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - - try { - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new 
LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?c ?e ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - -// Scanner scan1 = null; -// Scanner scan2 = null; -// try { -// scan1 = accCon.createScanner(tablename, new Authorizations("auths")); -// scan2 = accCon.createScanner("table2", new Authorizations("auths")); -// } catch (TableNotFoundException e) { -// -// e.printStackTrace(); -// } -// -// scan1.setRange(new Range()); -// -// for (Map.Entry entry : scan1) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } -// -// -// scan2.setRange(new Range()); -// -// for (Map.Entry entry : scan2) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - 
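
Each test pins an index to its defining SPARQL query with a pair of cardinality assertions like the two here. Note that `assertEquals((double) count, cardinality())` with no delta compares boxed `Double`s for exact equality; a sketch of the same check with JUnit's explicit-delta overload (assuming `cardinality()` returns a `double`, as the casts in this file imply):

    static void assertCardinalityMatches(SailRepositoryConnection conn, String indexSparql,
            AccumuloIndexSet ais) throws Exception {
        // Count the solutions of the SPARQL query that defined the index...
        CountingResultHandler crh = new CountingResultHandler();
        conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparql).evaluate(crh);
        // ...and require the materialized table to report the same cardinality.
        Assert.assertEquals((double) crh.getCount(), ais.cardinality(), 0.0);
    }
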
Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - //new SPARQLResultsXMLWriter(System.out) - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(3, crh1.getCount()); - Assert.assertEquals(3, crh2.getCount()); - - - - } - - - - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder6ThreeBindingSet() { - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - - try { - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?c ?l ?e ?o ?f " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - //new SPARQLResultsXMLWriter(System.out) - - try { - smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2); - } catch (TupleQueryResultHandlerException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (RepositoryException e) { - - e.printStackTrace(); - } - - Assert.assertEquals(3, crh1.getCount()); - Assert.assertEquals(3, crh2.getCount()); - - - - } - - - - - - - - - - @Test - public void testEvaluateTwoIndexThreeVarOrder7ThreeBindingSet() { - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - - try { - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - 
conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - } catch (RepositoryException e5) { - - e5.printStackTrace(); - } - - - try { - if(accCon.tableOperations().exists("table2")){ - accCon.tableOperations().delete("table2"); - } - accCon.tableOperations().create("table2"); - } catch (AccumuloException e4) { - - e4.printStackTrace(); - } catch (AccumuloSecurityException e4) { - - e4.printStackTrace(); - } catch (TableExistsException e4) { - - e4.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - try { - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - } catch (RepositoryException e3) { - - e3.printStackTrace(); - } - - - - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - - String indexSparqlString2 = ""// - + "SELECT ?o ?l ?c ?e ?f " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - - - - - - - CountingResultHandler crh1 = new CountingResultHandler(); - CountingResultHandler crh2 = new CountingResultHandler(); - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1); - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2); - } catch (TupleQueryResultHandlerException e2) { - - e2.printStackTrace(); - } catch (QueryEvaluationException e2) { - - e2.printStackTrace(); - } catch (MalformedQueryException e2) { - - e2.printStackTrace(); - } catch (RepositoryException e2) { - - e2.printStackTrace(); - } - - - - List index = Lists.newArrayList(); - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - - try { - ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename); - ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } catch (SailException e) { - - e.printStackTrace(); - } catch (QueryEvaluationException e) { - - e.printStackTrace(); - } catch (MutationsRejectedException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - - - index.add(ais2); - index.add(ais1); - - - Assert.assertEquals((double)crh1.getCount(), ais1.cardinality()); - Assert.assertEquals((double)crh2.getCount(), ais2.cardinality()); - - - crh1 = new CountingResultHandler(); - crh2 = new CountingResultHandler(); - - - try { - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1); - } catch (TupleQueryResultHandlerException e1) { - - e1.printStackTrace(); - } catch (QueryEvaluationException e1) { - - e1.printStackTrace(); - } catch (MalformedQueryException e1) { - - e1.printStackTrace(); - } catch (RepositoryException e1) { - - e1.printStackTrace(); - } - - - - - ParsedQuery pq = null; - SPARQLParser sp = new SPARQLParser(); - try { - pq = sp.parseQuery(queryString, null); - } catch (MalformedQueryException e) { - - e.printStackTrace(); - } - - ExternalProcessor processor = new ExternalProcessor(index); - - - - Sail processingSail = new ExternalSail(s, processor); - SailRepository smartSailRepo = new SailRepository(processingSail); - try { - smartSailRepo.initialize(); - } catch (RepositoryException e) { - - 
-            e.printStackTrace();
-        }
-
-        // new SPARQLResultsXMLWriter(System.out)
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        Assert.assertEquals(3, crh1.getCount());
-        Assert.assertEquals(3, crh2.getCount());
-
-    }
-
-    @Test
-    public void testEvaluateTwoIndexThreeVarInvalidOrder1() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?c ?e ?l " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?e ?o ?f ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais2);
-        index.add(ais1);
-
-        Assert.assertEquals((double) crh1.getCount(), ais1.cardinality());
-        Assert.assertEquals((double) crh2.getCount(), ais2.cardinality());
-
-        crh1 = new CountingResultHandler();
-        crh2 = new CountingResultHandler();
-
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
-        } catch (TupleQueryResultHandlerException e1) {
-            e1.printStackTrace();
-        } catch (QueryEvaluationException e1) {
-            e1.printStackTrace();
-        } catch (MalformedQueryException e1) {
-            e1.printStackTrace();
-        } catch (RepositoryException e1) {
-            e1.printStackTrace();
-        }
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        Sail processingSail = new ExternalSail(s, processor);
-        SailRepository smartSailRepo = new SailRepository(processingSail);
-        try {
-            smartSailRepo.initialize();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        Assert.assertEquals(crh1.getCount(), crh2.getCount());
-
-    }
-
-    @Test
-    public void testEvaluateTwoIndexThreeVarInvalidOrder2() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?c ?e ?l " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?e ?f ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais2);
-        index.add(ais1);
-
-        Assert.assertEquals((double) crh1.getCount(), ais1.cardinality());
-        Assert.assertEquals((double) crh2.getCount(), ais2.cardinality());
-
-        crh1 = new CountingResultHandler();
-        crh2 = new CountingResultHandler();
-
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
-        } catch (TupleQueryResultHandlerException e1) {
-            e1.printStackTrace();
-        } catch (QueryEvaluationException e1) {
-            e1.printStackTrace();
-        } catch (MalformedQueryException e1) {
-            e1.printStackTrace();
-        } catch (RepositoryException e1) {
-            e1.printStackTrace();
-        }
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        Sail processingSail = new ExternalSail(s, processor);
-        SailRepository smartSailRepo = new SailRepository(processingSail);
-        try {
-            smartSailRepo.initialize();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        boolean throwsException = false;
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        } catch (IllegalStateException e) {
-            throwsException = true;
-        }
-
-        Assert.assertTrue(throwsException);
-
-    }
-
-    @Test
-    public void testEvaluateOneIndex() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais1);
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        Sail processingSail = new ExternalSail(s, processor);
-        SailRepository smartSailRepo = new SailRepository(processingSail);
-        try {
-            smartSailRepo.initialize();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-    }
-
-    @Test
-    public void testEvaluateTwoIndexThreeVarOrder3() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?f ?e ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais2);
-        index.add(ais1);
-
-        Assert.assertEquals((double) crh1.getCount(), ais1.cardinality());
-        Assert.assertEquals((double) crh2.getCount(), ais2.cardinality());
-
-        // System.out.println("Counts are " + crh1.getCount() + " and " + crh2.getCount());
-
-        crh1 = new CountingResultHandler();
-        crh2 = new CountingResultHandler();
-
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
-        } catch (TupleQueryResultHandlerException e1) {
-            e1.printStackTrace();
-        } catch (QueryEvaluationException e1) {
-            e1.printStackTrace();
-        } catch (MalformedQueryException e1) {
-            e1.printStackTrace();
-        } catch (RepositoryException e1) {
-            e1.printStackTrace();
-        }
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        Sail processingSail = new ExternalSail(s, processor);
-        SailRepository smartSailRepo = new SailRepository(processingSail);
-        try {
-            smartSailRepo.initialize();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-        Assert.assertEquals(crh1.getCount(), crh2.getCount());
-
-    }
-
-    @Test
-    public void testSupportedVarOrders1() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?f ?e ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String indexSparqlString3 = ""//
-                + "SELECT ?a ?b ?c " //
-                + "{" //
-                + "  ?b a ?a . "//
-                + "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c "//
-                + "}";//
-
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        Set<String> ais1Set1 = Sets.newHashSet();
-        ais1Set1.add("dog");
-
-        Assert.assertTrue(ais1.supportsBindingSet(ais1Set1));
-        ais1Set1.add("duck");
-
-        Assert.assertTrue(ais1.supportsBindingSet(ais1Set1));
-
-        ais1Set1.add("chicken");
-
-        Assert.assertTrue(ais1.supportsBindingSet(ais1Set1));
-
-        Set<String> ais2Set1 = Sets.newHashSet();
-        ais2Set1.add("f");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set1));
-        ais2Set1.add("e");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set1));
-
-        ais2Set1.add("o");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set1));
-
-        ais2Set1.add("l");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set1));
-
-        Set<String> ais2Set2 = Sets.newHashSet();
-        ais2Set2.add("f");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set2));
-
-        ais2Set2.add("o");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set2));
-
-        ais2Set2.add("c");
-
-        Assert.assertTrue(!ais2.supportsBindingSet(ais2Set2));
-
-        Set<String> ais2Set3 = Sets.newHashSet();
-        ais2Set3.add("c");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set3));
-
-        ais2Set3.add("e");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set3));
-
-        ais2Set3.add("l");
-
-        Assert.assertTrue(ais2.supportsBindingSet(ais2Set3));
-
-        List<ExternalTupleSet> eList = Lists.newArrayList();
-        eList.add(ais1);
-        SPARQLParser p = new SPARQLParser();
-        ParsedQuery pq = null;
-        try {
-            pq = p.parseQuery(indexSparqlString3, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        // System.out.println("Supported single order is " + ais1.getSupportedVariableOrders());
-        //
-        // IndexedExecutionPlanGenerator iep = new IndexedExecutionPlanGenerator(pq.getTupleExpr(), eList);
-        // List<ExternalTupleSet> indices = iep.getNormalizedIndices();
-        // System.out.println("Number of indices is " + indices.size());
-        //
-        // for (ExternalTupleSet e : indices) {
-        //     System.out.println("Index is " + e.getTupleExpr() + " and supported orders are " + e.getSupportedVariableOrders());
-        // }
-
-    }
-
-    @Test
-    public void testEvaluateTwoIndexThreeVarOrder() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?f ?e ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais2);
-        index.add(ais1);
-
-        Assert.assertEquals((double) crh1.getCount(), ais1.cardinality());
-        Assert.assertEquals((double) crh2.getCount(), ais2.cardinality());
-
-        // System.out.println("Counts are " + crh1.getCount() + " and " + crh2.getCount());
-
-        crh1 = new CountingResultHandler();
-        crh2 = new CountingResultHandler();
-
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
-        } catch (TupleQueryResultHandlerException e1) {
-            e1.printStackTrace();
-        } catch (QueryEvaluationException e1) {
-            e1.printStackTrace();
-        } catch (MalformedQueryException e1) {
-            e1.printStackTrace();
-        } catch (RepositoryException e1) {
-            e1.printStackTrace();
-        }
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        Sail processingSail = new ExternalSail(s, processor);
-        SailRepository smartSailRepo = new SailRepository(processingSail);
-        try {
-            smartSailRepo.initialize();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-        Assert.assertEquals(crh1.getCount(), crh2.getCount());
-
-    }
-
-    @Test
-    public void testEvaluateTwoIndexValidate() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?f ?e ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais1);
-        index.add(ais2);
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        List<TupleExpr> teList = Lists.newArrayList();
-        TupleExpr te = processor.process(pq.getTupleExpr());
-
-        ExternalTupleVstor etn = new ExternalTupleVstor();
-        te.visit(etn);
-
-        teList.add(te);
-
-        IndexPlanValidator ipv = new IndexPlanValidator(false);
-
-        Assert.assertTrue(ipv.isValid(te));
-
-    }
-
-    @Test
-    public void testEvaluateThreeIndexValidate() {
-
-        URI superclass = new URIImpl("uri:superclass");
-        URI superclass2 = new URIImpl("uri:superclass2");
-
-        URI sub = new URIImpl("uri:entity");
-        subclass = new URIImpl("uri:class");
-        obj = new URIImpl("uri:obj");
-        talksTo = new URIImpl("uri:talksTo");
-
-        URI howlsAt = new URIImpl("uri:howlsAt");
-        URI subType = new URIImpl("uri:subType");
-        URI superSuperclass = new URIImpl("uri:super_superclass");
-
-        try {
-            conn.add(subclass, RDF.TYPE, superclass);
-            conn.add(subclass2, RDF.TYPE, superclass2);
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-            conn.add(sub, howlsAt, superclass);
-            conn.add(superclass, subType, superSuperclass);
-        } catch (RepositoryException e5) {
-            e5.printStackTrace();
-        }
-
-        try {
-            if (accCon.tableOperations().exists("table2")) {
-                accCon.tableOperations().delete("table2");
-            }
-            accCon.tableOperations().create("table2");
-
-            if (accCon.tableOperations().exists("table3")) {
-                accCon.tableOperations().delete("table3");
-            }
-            accCon.tableOperations().create("table3");
-        } catch (AccumuloException e4) {
-            e4.printStackTrace();
-        } catch (AccumuloSecurityException e4) {
-            e4.printStackTrace();
-        } catch (TableExistsException e4) {
-            e4.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        try {
-            conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
-            conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
-        } catch (RepositoryException e3) {
-            e3.printStackTrace();
-        }
-
-        // TODO Auto-generated method stub
-        String indexSparqlString = ""//
-                + "SELECT ?dog ?pig ?duck " //
-                + "{" //
-                + "  ?pig a ?dog . "//
-                + "  ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck "//
-                + "}";//
-
-        String indexSparqlString2 = ""//
-                + "SELECT ?o ?f ?e ?c ?l " //
-                + "{" //
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "}";//
-
-        String indexSparqlString3 = ""//
-                + "SELECT ?wolf ?sheep ?chicken " //
-                + "{" //
-                + "  ?wolf <uri:howlsAt> ?sheep . "//
-                + "  ?sheep <uri:subType> ?chicken. "//
-                + "}";//
-
-        String queryString = ""//
-                + "SELECT ?e ?c ?l ?f ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-                + "  ?c a ?f . " //
-                + "  ?e <uri:howlsAt> ?f. "//
-                + "  ?f <uri:subType> ?o. "//
-                + "}";//
-
-        CountingResultHandler crh1 = new CountingResultHandler();
-        CountingResultHandler crh2 = new CountingResultHandler();
-        try {
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh1);
-            conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString2).evaluate(crh2);
-        } catch (TupleQueryResultHandlerException e2) {
-            e2.printStackTrace();
-        } catch (QueryEvaluationException e2) {
-            e2.printStackTrace();
-        } catch (MalformedQueryException e2) {
-            e2.printStackTrace();
-        } catch (RepositoryException e2) {
-            e2.printStackTrace();
-        }
-
-        List<ExternalTupleSet> index = Lists.newArrayList();
-        AccumuloIndexSet ais1 = null;
-        AccumuloIndexSet ais2 = null;
-        AccumuloIndexSet ais3 = null;
-
-        try {
-            ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablename);
-            ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2");
-            ais3 = new AccumuloIndexSet(indexSparqlString3, conn, accCon, "table3");
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        index.add(ais1);
-        index.add(ais3);
-        index.add(ais2);
-
-        ParsedQuery pq = null;
-        SPARQLParser sp = new SPARQLParser();
-        try {
-            pq = sp.parseQuery(queryString, null);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        }
-
-        ExternalProcessor processor = new ExternalProcessor(index);
-
-        List<TupleExpr> teList = Lists.newArrayList();
-        TupleExpr te = processor.process(pq.getTupleExpr());
-
-        // ExternalTupleVstor etn = new ExternalTupleVstor();
-        // te.visit(etn);
-        //
-        // for (QueryModelNode q : etn.getExtTup()) {
-        //     System.out.println("Ext tup maps are " + ((ExternalTupleSet) q).getTableVarMap());
-        // }
-
-        teList.add(te);
-
-        IndexPlanValidator ipv = new IndexPlanValidator(false);
-
-        Assert.assertTrue(ipv.isValid(te));
-
-    }
-
-    public static class CountingResultHandler implements TupleQueryResultHandler {
-        private int count = 0;
-
-        public int getCount() {
-            return count;
-        }
-
-        public void resetCount() {
-            this.count = 0;
-        }
-
-        @Override
-        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
-            count++;
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-            // TODO Auto-generated method stub
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-            // TODO Auto-generated method stub
-        }
-    }
-
-}
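All of the tests deleted above exercise the same round trip: a SPARQL sub-query is materialized into an Accumulo-backed precomputed-join (PCJ) table through AccumuloIndexSet, queries are rewritten against that table by ExternalProcessor/ExternalSail, and a CountingResultHandler verifies that the indexed evaluation yields the same number of solutions as direct evaluation. A minimal sketch of that round trip, using only classes that appear in the deleted code; the helper method name, its throws clause, and the "pcjTable" table name are illustrative, not part of the original tests:

    // Hypothetical helper distilled from the deleted tests; assumes the same
    // fixtures they use: a SailRepositoryConnection conn, an Accumulo
    // Connector accCon, and the underlying Sail s.
    static void assertIndexedCountMatches(String indexSparql, String query,
            SailRepositoryConnection conn, Connector accCon, Sail s) throws Exception {
        // Materialize the sub-query's solutions into the PCJ table.
        AccumuloIndexSet ais = new AccumuloIndexSet(indexSparql, conn, accCon, "pcjTable");

        // Register the index and route queries through the rewriting processor.
        List<ExternalTupleSet> index = Lists.newArrayList();
        index.add(ais);
        ExternalProcessor processor = new ExternalProcessor(index);
        SailRepository smartRepo = new SailRepository(new ExternalSail(s, processor));
        smartRepo.initialize();

        // The indexed evaluation should produce exactly as many solutions as
        // evaluating the same query directly against the base repository.
        CountingResultHandler direct = new CountingResultHandler();
        CountingResultHandler indexed = new CountingResultHandler();
        conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(direct);
        smartRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, query)
                .evaluate(indexed);
        Assert.assertEquals(direct.getCount(), indexed.getCount());
    }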
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
deleted file mode 100644
index f93f58a0f..000000000
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
+++ /dev/null
@@ -1,803 +0,0 @@
-package mvm.rya.indexing.external;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.Arrays;
-import java.util.List;
-
-import junit.framework.Assert;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.MutationsRejectedException;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.LiteralImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-
-public class AccumuloIndexSetTest2 {
-
-    private SailRepositoryConnection conn;
-    private SailRepository repo;
-    private Connector accCon;
-    String tablePrefix = "table_";
-    AccumuloRdfConfiguration conf;
-    URI sub, sub2, obj, obj2, subclass, subclass2, talksTo;
-
-    @Before
-    public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException,
-            MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException,
-            RyaDAOException {
-
-        conf = new AccumuloRdfConfiguration();
-        conf.set(ConfigUtils.USE_PCJ, "true");
-        conf.set(ConfigUtils.USE_MOCK_INSTANCE, "true");
-        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
-        conf.setTablePrefix(tablePrefix);
-        conf.setPcjTables(Arrays.asList("table1", "table2"));
-
-        Sail sail = RyaSailFactory.getInstance(conf);
-        repo = new SailRepository(sail);
-        repo.initialize();
-        conn = repo.getConnection();
-
-        sub = new URIImpl("uri:entity");
-        subclass = new URIImpl("uri:class");
-        obj = new URIImpl("uri:obj");
-        talksTo = new URIImpl("uri:talksTo");
-
-        conn.add(sub, RDF.TYPE, subclass);
-        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
-        conn.add(sub, talksTo, obj);
-
-        sub2 = new URIImpl("uri:entity2");
-        subclass2 = new URIImpl("uri:class2");
-        obj2 = new URIImpl("uri:obj2");
-
-        conn.add(sub2, RDF.TYPE, subclass2);
-        conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2"));
-        conn.add(sub2, talksTo, obj2);
-
-        accCon = new MockInstance("instance").getConnector("root", new PasswordToken("".getBytes()));
-        accCon.tableOperations().create("table1");
-        accCon.tableOperations().create("table2");
-
-    }
-
-    @After
-    public void close() throws RepositoryException, AccumuloException, AccumuloSecurityException,
-            TableNotFoundException {
-
-        conf = null;
-        conn.close();
-        accCon.tableOperations().delete(tablePrefix + "spo");
"spo"); - accCon.tableOperations().delete(tablePrefix + "po"); - accCon.tableOperations().delete(tablePrefix + "osp"); - - if (accCon.tableOperations().exists("table1")) { - accCon.tableOperations().delete("table1"); - } - - if (accCon.tableOperations().exists("table2")) { - accCon.tableOperations().delete("table2"); - } - - } - - @Test - public void testEvaluateTwoIndexTwoVarOrder2() throws RepositoryException, MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexTwoVarInvalidOrder() throws RepositoryException, MalformedQueryException, - SailException, QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?e ?c ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder1() throws MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, RepositoryException, - TupleQueryResultHandlerException { - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?c ?l ?f ?o" // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - // @Test - public void testEvaluateTwoIndexThreeVarsDiffLabel() throws RepositoryException, MalformedQueryException, - SailException, QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?owl " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?owl "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?c ?l ?f ?o" // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder2() throws RepositoryException, MalformedQueryException, - SailException, QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?c ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder3ThreeBindingSet() throws TupleQueryResultHandlerException, - QueryEvaluationException, MalformedQueryException, RepositoryException, SailException, - MutationsRejectedException, TableNotFoundException { - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?l ?e ?c " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(3, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder5ThreeBindingSet() throws MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, RepositoryException, - TupleQueryResultHandlerException { - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . 
"// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?l ?c " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(3, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder4ThreeBindingSet() throws MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, RepositoryException, - TupleQueryResultHandlerException { - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - // TODO Auto-generated method stub - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?c ?e ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(3, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder6ThreeBindingSet() throws MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, RepositoryException, - TupleQueryResultHandlerException { - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?c ?l ?e ?o ?f " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(3, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder7ThreeBindingSet() throws MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, RepositoryException, - TupleQueryResultHandlerException { - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - URI obj3 = new URIImpl("uri:obj3"); - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - URI superclass3 = new URIImpl("uri:superclass3"); - - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3")); - conn.add(sub3, talksTo, obj3); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(subclass3, RDF.TYPE, superclass3); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?c ?e ?l " // - + "{" // - + " ?e a ?c . 
"// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?l ?c ?e ?f " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(3, crh.getCount()); - - } - - @Test - public void testEvaluateOneIndex() throws RepositoryException, MalformedQueryException, SailException, - QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?duck " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?duck "// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - @Test - public void testEvaluateTwoIndexThreeVarOrder3() throws RepositoryException, MalformedQueryException, - SailException, QueryEvaluationException, MutationsRejectedException, TableNotFoundException, - TupleQueryResultHandlerException { - - URI superclass = new URIImpl("uri:superclass"); - URI superclass2 = new URIImpl("uri:superclass2"); - - conn.add(subclass, RDF.TYPE, superclass); - conn.add(subclass2, RDF.TYPE, superclass2); - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - String indexSparqlString = ""// - + "SELECT ?dog ?pig ?duck " // - + "{" // - + " ?pig a ?dog . "// - + " ?pig ?duck "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?o ?f ?e ?c ?l " // - + "{" // - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . " // - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?f ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l. "// - + " ?e ?o . "// - + " ?o ?l. "// - + " ?c a ?f . 
" // - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, "table1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, "table2"); - - CountingResultHandler crh = new CountingResultHandler(); - - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - } - - public static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - - public int getCount() { - return count; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - count++; - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - } - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java deleted file mode 100644 index f7f3cbf97..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java +++ /dev/null @@ -1,550 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
deleted file mode 100644
index f7f3cbf97..000000000
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
+++ /dev/null
@@ -1,550 +0,0 @@
-package mvm.rya.indexing.external;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import junit.framework.Assert;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.LiteralImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-import org.openrdf.sail.memory.MemoryStore;
-
-public class PrecompJoinOptimizerIntegrationTest {
-
-    private SailRepositoryConnection conn;
-    private SailRepository repo;
-    private Connector accCon;
-    String tablePrefix = "table_";
-    AccumuloRdfConfiguration conf;
-    URI sub, sub2, obj, obj2, subclass, subclass2, talksTo;
-
-    @Before
-    public void init() throws RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException,
-            MalformedQueryException, AccumuloException, AccumuloSecurityException, TableExistsException,
-            RyaDAOException {
-
-        conf = new AccumuloRdfConfiguration();
-        conf.set(ConfigUtils.USE_PCJ, "true");
-        conf.set(ConfigUtils.USE_MOCK_INSTANCE, "true");
-        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
-        conf.setTablePrefix(tablePrefix);
-
-        Sail sail = RyaSailFactory.getInstance(conf);
-        repo = new SailRepository(sail);
-        repo.initialize();
-        conn = repo.getConnection();
-
-        sub = new URIImpl("uri:entity");
-        subclass = new URIImpl("uri:class");
-        obj = new URIImpl("uri:obj");
-        talksTo = new URIImpl("uri:talksTo");
-
-        conn.add(sub, RDF.TYPE, subclass);
-        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
-        conn.add(sub, talksTo, obj);
-
-        sub2 = new URIImpl("uri:entity2");
-        subclass2 = new URIImpl("uri:class2");
-        obj2 = new URIImpl("uri:obj2");
-
-        conn.add(sub2, RDF.TYPE, subclass2);
-        conn.add(sub2, RDFS.LABEL, new LiteralImpl("label2"));
-        conn.add(sub2, talksTo, obj2);
-
-        accCon = new MockInstance("instance").getConnector("root", new PasswordToken("".getBytes()));
-
-    }
-
-    @After
-    public void close() throws RepositoryException, AccumuloException,
AccumuloSecurityException, TableNotFoundException { - - conf = null; - conn.close(); - accCon.tableOperations().delete(tablePrefix + "spo"); - accCon.tableOperations().delete(tablePrefix + "po"); - accCon.tableOperations().delete(tablePrefix + "osp"); - } - - - - @Test - public void testEvaluateSingeIndex() throws TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, RepositoryException, AccumuloException, - AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - accCon.tableOperations().create(tablePrefix + "INDEX1"); - - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "// - + "}";// - - AccumuloIndexSet ais = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablePrefix + "INDEX1"); - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "// - + " ?e <uri:talksTo> ?o . "// - + "}";// - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - -// Scanner scan = accCon.createScanner(tablePrefix + "spo", new Authorizations("U")); -// -// for(Entry e: scan) { -// System.out.println(e.getKey().getRow()); -// } - - Assert.assertEquals(2, crh.getCount()); - - } - - - - - - - @Test - public void testEvaluateTwoIndexTwoVarOrder1() throws AccumuloException, AccumuloSecurityException, - TableExistsException, RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, - TableNotFoundException, TupleQueryResultHandlerException, RyaDAOException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - - if (accCon.tableOperations().exists(tablePrefix + "INDEX2")) { - accCon.tableOperations().delete(tablePrefix + "INDEX2"); - } - - accCon.tableOperations().create(tablePrefix + "INDEX1"); - accCon.tableOperations().create(tablePrefix + "INDEX2"); - - - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - - - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?o ?l " // - + "{" // - + " ?e <uri:talksTo> ?o . "// - + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l "// - + "}";// - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "// - + " ?e <uri:talksTo> ?o . 
"// - + " ?o ?l "// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablePrefix + "INDEX1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, tablePrefix + "INDEX2"); - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(2, crh.getCount()); - - - - - } - - - @Test - public void testEvaluateSingeFilterIndex() throws TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, RepositoryException, AccumuloException, - AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - accCon.tableOperations().create(tablePrefix + "INDEX1"); - - String indexSparqlString = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " Filter(?e = ) " // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - AccumuloIndexSet ais = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablePrefix + "INDEX1"); - - - String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " Filter(?e = ) " // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + "}";// - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(1, crh.getCount()); - - - } - - - - - @Test - public void testEvaluateSingeFilterWithUnion() throws TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, RepositoryException, AccumuloException, - AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX2")) { - accCon.tableOperations().delete(tablePrefix + "INDEX2"); - } - accCon.tableOperations().create(tablePrefix + "INDEX2"); - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " Filter(?l = \"label2\") " // - + " ?e a ?c . "// - + " ?e ?l "// - + "}";// - - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, tablePrefix + "INDEX2"); - - - String queryString = ""// - + "SELECT ?e ?c ?o ?m ?l" // - + "{" // - + " Filter(?l = \"label2\") " // - + " ?e ?o . "// - + " { ?e a ?c . ?e ?m }"// - + " UNION { ?e a ?c . ?e ?l }"// - + "}";// - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(1, crh.getCount()); - - - } - - - - @Test - public void testEvaluateSingeFilterWithLeftJoin() throws TupleQueryResultHandlerException, QueryEvaluationException, - MalformedQueryException, RepositoryException, AccumuloException, - AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - accCon.tableOperations().create(tablePrefix + "INDEX1"); - - String indexSparqlString1 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " Filter(?l = \"label3\") " // - + " ?e a ?c . 
"// - + " ?e ?l "// - + "}";// - - - URI sub3 = new URIImpl("uri:entity3"); - URI subclass3 = new URIImpl("uri:class3"); - conn.add(sub3, RDF.TYPE, subclass3); - conn.add(sub3,RDFS.LABEL, new LiteralImpl("label3")); - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString1, conn, accCon, tablePrefix + "INDEX1"); - - String queryString = ""// - + "SELECT ?e ?c ?o ?m ?l" // - + "{" // - + " Filter(?l = \"label3\") " // - + " ?e a ?c . " // - + " ?e ?l . " // - + " OPTIONAL { ?e ?o . ?e ?m }"// - + "}";// - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(1, crh.getCount()); - - - } - - - - - - - - @Test - public void testEvaluateTwoIndexUnionFilter() throws AccumuloException, AccumuloSecurityException, - TableExistsException, RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, - TableNotFoundException, TupleQueryResultHandlerException, RyaDAOException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - - if (accCon.tableOperations().exists(tablePrefix + "INDEX2")) { - accCon.tableOperations().delete(tablePrefix + "INDEX2"); - } - - accCon.tableOperations().create(tablePrefix + "INDEX1"); - accCon.tableOperations().create(tablePrefix + "INDEX2"); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub, RDF.TYPE, obj); - conn.add(sub2, RDF.TYPE, obj2); - - - String indexSparqlString = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " Filter(?l = \"label2\") " // - + " ?e a ?o . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " Filter(?l = \"label2\") " // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - String queryString = ""// - + "SELECT ?c ?e ?l ?o " // - + "{" // - + " Filter(?l = \"label2\") " // - + " ?e a ?c . "// - + " { ?e a ?o . ?e ?l }"// - + " UNION { ?e ?o . 
?o ?l }"// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablePrefix + "INDEX1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, tablePrefix + "INDEX2"); - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - - Assert.assertEquals(6, crh.getCount()); - - - - } - - - - - - @Test - public void testEvaluateTwoIndexLeftJoinUnionFilter() throws AccumuloException, AccumuloSecurityException, - TableExistsException, RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, - TableNotFoundException, TupleQueryResultHandlerException, RyaDAOException { - - if (accCon.tableOperations().exists(tablePrefix + "INDEX1")) { - accCon.tableOperations().delete(tablePrefix + "INDEX1"); - } - - if (accCon.tableOperations().exists(tablePrefix + "INDEX2")) { - accCon.tableOperations().delete(tablePrefix + "INDEX2"); - } - - accCon.tableOperations().create(tablePrefix + "INDEX1"); - accCon.tableOperations().create(tablePrefix + "INDEX2"); - - conn.add(obj, RDFS.LABEL, new LiteralImpl("label")); - conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2")); - conn.add(sub, RDF.TYPE, obj); - conn.add(sub2, RDF.TYPE, obj2); - - URI livesIn = new URIImpl("uri:livesIn"); - URI city = new URIImpl("uri:city"); - URI city2 = new URIImpl("uri:city2"); - URI city3 = new URIImpl("uri:city3"); - conn.add(sub,livesIn,city); - conn.add(sub2,livesIn,city2); - conn.add(sub2,livesIn,city3); - conn.add(sub,livesIn,city3); - - - String indexSparqlString = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " ?e a ?o . "// - + " ?e ?l "// - + "}";// - - String indexSparqlString2 = ""// - + "SELECT ?e ?l ?o " // - + "{" // - + " ?e ?o . "// - + " ?o ?l "// - + "}";// - - String queryString = ""// - + "SELECT ?c ?e ?l ?o " // - + "{" // - + " Filter(?c = ) " // - + " ?e ?c . "// - + " OPTIONAL{{ ?e a ?o . ?e ?l }"// - + " UNION { ?e ?o . 
?o ?l }}"// - + "}";// - - AccumuloIndexSet ais1 = new AccumuloIndexSet(indexSparqlString, conn, accCon, tablePrefix + "INDEX1"); - AccumuloIndexSet ais2 = new AccumuloIndexSet(indexSparqlString2, conn, accCon, tablePrefix + "INDEX2"); - - CountingResultHandler crh = new CountingResultHandler(); - conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh); - -// Scanner scan = accCon.createScanner(tablePrefix + "spo", new Authorizations("U")); -// -// for(Entry e: scan) { -// System.out.println(e.getKey().getRow()); -// } - - Assert.assertEquals(6, crh.getCount()); - - - - } - - - - - public static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - - public int getCount() { - return count; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - System.out.println(arg0); - count++; - System.out.println("Count is " + count); - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - } - - - - - -} - - - - diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java deleted file mode 100644 index 396224f2b..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java +++ /dev/null @@ -1,521 +0,0 @@ -package mvm.rya.indexing.external; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import java.util.ArrayList; -import java.util.List; - -import junit.framework.Assert; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.beust.jcommander.internal.Lists; - -public class PrecompJoinOptimizerTest { - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."// - + " ?u <uri:talksTo> ?s . "// - + "}";// - - private String q8 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?l ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + " ?c <uri:talksTo> ?e . "// - + "}";// - - private String q9 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?d <uri:talksTo> ?f . "// - + "}";// - - - - - private String q15 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + "}";// - - private String q16 = ""// - + "SELECT ?f ?m ?d ?e ?l " // - + "{" // - + " ?d <uri:talksTo> ?f . "// - + " ?d <uri:talksTo> ?e . "// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + "}";// - - private String q17 = ""// - + "SELECT ?chicken ?dog ?cat " // - + "{" // - + " ?chicken <uri:talksTo> ?dog . "// - + " ?cat <http://www.w3.org/2000/01/rdf-schema#label> ?chicken ."// - + "}";// - - private String q18 = ""// - + "SELECT ?dog ?chicken " // - + "{" // - + " ?chicken <uri:talksTo> ?dog . "// - + "}";// - - private String q19 = ""// - + "SELECT ?cat ?chicken " // - + "{" // - + " ?cat <http://www.w3.org/2000/01/rdf-schema#label> ?chicken ."// - + "}";// - - - private String q20 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?m <uri:talksTo> ?e . "// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + "}";// - - - - private String q21 = ""// - + "SELECT ?u ?s ?t " // - + "{" // - + " ?s a ?t ."// - + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."// - + " ?u <uri:talksTo> ?s . 
"// - + "}";// - - - - @Test - public void testSingleIndex() throws Exception { - - SPARQLParser parser = new SPARQLParser(); - - - ParsedQuery pq1 = parser.parseQuery(q15, null); - ParsedQuery pq2 = parser.parseQuery(q7, null); - ParsedQuery pq3 = parser.parseQuery(q8, null); - ParsedQuery pq4 = parser.parseQuery(q9, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup2); - optTupNodes.add(extTup3); - - PrecompJoinOptimizer pcj = new PrecompJoinOptimizer(list, true); - TupleExpr te = pq1.getTupleExpr(); - pcj.optimize(te, null, null); - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - - List qNodes = nc.getNodes(); - - - Assert.assertEquals(qNodes.size(), optTupNodes.size()); - for(QueryModelNode node: qNodes) { - Assert.assertTrue(optTupNodes.contains(node)); - } - - - } - - - - - - @Test - public void testSingleIndex2() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " ?f a ?m ."// - + " ?c a ?l ."// - + " ?d ?f . "// - + " ?e ?c . "// - + " ?m ?d ."// - + " ?l ?e ."// - + " ?m ?e . "// - + "}";// - - String q2 = ""// - + "SELECT ?u ?s ?t " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - String q3 = ""// - + "SELECT ?e ?c ?l " // - + "{" // - + " ?c a ?l ."// - + " ?l ?e ."// - + " ?e ?c . "// - + "}";// - - String q4 = ""// - + "SELECT ?d ?f ?m " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - ParsedQuery pq3 = parser.parseQuery(q3, null); - ParsedQuery pq4 = parser.parseQuery(q4, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup1); - - List spList = StatementPatternCollector.process(pq1.getTupleExpr()); - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup3); - optTupNodes.add(spList.get(6)); - optTupNodes.add(extTup2); - - PrecompJoinOptimizer pcj = new PrecompJoinOptimizer(list, true); - TupleExpr te = pq1.getTupleExpr(); - pcj.optimize(te, null, null); - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - - //System.out.println("Optimal plan is " + optimalTup); - - List qNodes = nc.getNodes(); - //System.out.println("Returned list is " + qNodes + " and comp list is " + optTupNodes); - - Assert.assertTrue(qNodes.equals(optTupNodes)); - - } - - - - - - - - - @Test - public void testTwoIndex() throws Exception { - - String q1 = ""// - + "SELECT ?f ?m ?d ?h ?i " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?d ?f ." // - + " ?f ?h ." // - + " ?f ?i ." // - + " ?i ?h ." // - + "}";// - - String q2 = ""// - + "SELECT ?t ?s ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." 
// - + "}";// - - String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s ?t ." // - + " ?t ?u ." // - + "}";// - - String q5 = ""// - + "SELECT ?m ?f ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - String q6 = ""// - + "SELECT ?d ?f ?h " // - + "{" // - + " ?d ?f ." // - + " ?f ?h ." // - + "}";// - - String q7 = ""// - + "SELECT ?f ?i ?h " // - + "{" // - + " ?f ?i ." // - + " ?i ?h ." // - + "}";// - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = parser.parseQuery(q1, null); - ParsedQuery pq2 = parser.parseQuery(q2, null); - ParsedQuery pq3 = parser.parseQuery(q3, null); - ParsedQuery pq4 = parser.parseQuery(q4, null); - ParsedQuery pq5 = parser.parseQuery(q5, null); - ParsedQuery pq6 = parser.parseQuery(q6, null); - ParsedQuery pq7 = parser.parseQuery(q7, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - SimpleExternalTupleSet extTup5 = new SimpleExternalTupleSet((Projection) pq6.getTupleExpr()); - SimpleExternalTupleSet extTup6 = new SimpleExternalTupleSet((Projection) pq7.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - list.add(extTup3); - - List optTupNodes = Lists.newArrayList(); - optTupNodes.add(extTup4); - optTupNodes.add(extTup6); - optTupNodes.add(extTup5); - - PrecompJoinOptimizer pcj = new PrecompJoinOptimizer(list, true); - TupleExpr te = pq1.getTupleExpr(); - pcj.optimize(te, null, null); - - System.out.println(te); - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - - List qNodes = nc.getNodes(); - - Assert.assertTrue(qNodes.equals(optTupNodes)); - - } - - - - - - - @Test - public void twoIndexFilterTest() { - - - String q1 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c " // - + "{" // - + " Filter(?f > \"5\")." // - + " Filter(?e > \"5\")." // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?m ?d ."// - + " ?l ?c ."// - + "}";// - - - String q2 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - String q3 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " Filter(?s > \"5\") ."// - + " ?s a ?t ."// - + " ?t ?u ."// - + " ?u ?s . "// - + "}";// - - - - String q4 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " Filter(?f > \"5\") ."// - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - - String q5 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " Filter(?e > \"5\") ."// - + " ?e a ?l ."// - + " ?l ?c ."// - + " ?c ?e . 
"// - + "}";// - - - - - SPARQLParser parser = new SPARQLParser(); - - ParsedQuery pq1 = null; - ParsedQuery pq2 = null; - ParsedQuery pq3 = null; - ParsedQuery pq4 = null; - ParsedQuery pq5 = null; - - - - try { - pq1 = parser.parseQuery(q1, null); - pq2 = parser.parseQuery(q2, null); - pq3 = parser.parseQuery(q3, null); - pq4 = parser.parseQuery(q4, null); - pq5 = parser.parseQuery(q5, null); - - - } catch (Exception e) { - e.printStackTrace(); - } - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq2.getTupleExpr()); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet((Projection) pq3.getTupleExpr()); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet((Projection) pq4.getTupleExpr()); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet((Projection) pq5.getTupleExpr()); - - List list = new ArrayList(); - - list.add(extTup2); - list.add(extTup1); - - List list2 = new ArrayList(); - - list2.add(extTup3); - list2.add(extTup4); - - PrecompJoinOptimizer pcj = new PrecompJoinOptimizer(list, true); - TupleExpr te = pq1.getTupleExpr(); - pcj.optimize(te, null, null); - - System.out.println(te); - - NodeCollector nc = new NodeCollector(); - te.visit(nc); - - Assert.assertEquals(nc.getNodes().size(), list2.size()); - - for(QueryModelNode e: nc.getNodes()) { - Assert.assertTrue(list2.contains((ExternalTupleSet)e)); - } - - - - } - - - - - - - - - public static class NodeCollector extends QueryModelVisitorBase { - - List qNodes = Lists.newArrayList(); - - - public List getNodes() { - return qNodes; - } - - - - @Override - public void meetNode(QueryModelNode node) { - if(node instanceof StatementPattern || node instanceof ExternalTupleSet) { - qNodes.add(node); - } - super.meetNode(node); - - } - - - } - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java deleted file mode 100644 index bac9871b4..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java +++ /dev/null @@ -1,1654 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.ExternalProcessor.BindingSetAssignmentCollector; -import mvm.rya.indexing.external.tupleSet.ExternalTupleSet; -import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet; - -import org.junit.Test; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.Assert; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Sets; - - - - - - -public class ExternalProcessorTest { - - - - - private String queryString = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?c a ?l . "// - + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "// - + " ?e <uri:talksTo> ?o "// - + "}";// - - private String indexSparqlString = ""// - + "SELECT ?x ?y ?z " // - + "{" // - + " ?x <http://www.w3.org/2000/01/rdf-schema#label> ?z. "// - + " ?x a ?y . "// - + " ?y a ?z "// - + "}";// - - - private String q1 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?c <http://www.w3.org/2000/01/rdf-schema#label> ?l. "// - + " ?l <uri:talksTo> ?e . "// - + "}";// - - private String q2 = ""// - + "SELECT ?a ?t ?v " // - + "{" // - + " ?a a ?t . "// - + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?v . "// - + " ?v <uri:talksTo> ?a . "// - + "}";// - - - - private String q5 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."// - + " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?p <uri:talksTo> ?n . "// - + " ?r <uri:talksTo> ?a . "// - + "}";// - - - - private String q7 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t ."// - + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."// - + " ?u <uri:talksTo> ?s . "// - + "}";// - - - private String q8 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."// - + " ?f a ?m ."// - + " ?p <uri:talksTo> ?n . "// - + " ?e a ?l ."// - + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + " ?r <uri:talksTo> ?a . "// - + "}";// - - - - - private String q11 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."// - + " ?f a ?m ."// - + " ?p <uri:talksTo> ?n . "// - + " ?e a ?l ."// - + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + " ?r <uri:talksTo> ?a . "// - + "}";// - - - private String q12 = ""// - + "SELECT ?b ?p ?dog ?cat " // - + "{" // - + " ?b a ?p ."// - + " ?dog a ?cat. "// - + "}";// - - - - private String q13 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit ?dick ?jane ?betty " // - + "{" // - + " ?w a ?t ."// - + " ?x a ?y ."// - + " ?duck a ?chicken ."// - + " ?pig a ?rabbit ."// - + " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."// - + " ?f a ?m ."// - + " ?p <uri:talksTo> ?n . "// - + " ?e a ?l ."// - + " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."// - + " ?d <uri:talksTo> ?f . "// - + " ?c <uri:talksTo> ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."// - + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."// - + " ?r <uri:talksTo> ?a . "// - + " ?dick <uri:talksTo> ?jane . "// - + " ?jane <uri:talksTo> ?betty . "// - + "}";// - - - private String q14 = ""// - + "SELECT ?harry ?susan ?mary " // - + "{" // - + " ?harry <uri:talksTo> ?susan . 
"// - + " ?susan ?mary . "// - + "}";// - - - - String q15 = ""// - + "SELECT ?a ?b ?c ?d ?e ?f ?q " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d ?e . "// - + " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " // - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " ?b a ?q ."// - + " }"// - + "}";// - - - String q16 = ""// - + "SELECT ?g ?h ?i " // - + "{" // - + " GRAPH ?y { " // - + " ?g a ?h ."// - + " ?h ?i ."// - + " }"// - + "}";// - - String q17 = ""// - + "SELECT ?j ?k ?l ?m ?n ?o " // - + "{" // - + " GRAPH ?z { " // - + " ?j ?k . "// - + " FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " // - + " }"// - + "}";// - - String q18 = ""// - + "SELECT ?r ?s ?t ?u " // - + "{" // - + " GRAPH ?q { " // - + " FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " // - + " ?t a ?u ."// - + " }"// - + "}";// - - - - String q19 = ""// - + "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d ?e . "// - + " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " // - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " // - + " ?h ?g. "// - + " ?b a ?q ."// - + " }"// - + "}";// - - - String q20 = ""// - + "SELECT ?m ?n " // - + "{" // - + " GRAPH ?q { " // - + " FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " // - + " ?n ?m. "// - + " }"// - + "}";// - - - String q21 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"Polygon\")) " // - + "}";// - - - String q22 = "PREFIX fts: "// - + "SELECT ?person ?commentmatch ?labelmatch" // - + "{" // - + " ?person a . "// - + " ?person ?labelmatch . "// - + " ?person ?commentmatch . "// - + " FILTER(fts:text(?labelmatch, \"bob\")) . " // - + " FILTER(fts:text(?commentmatch, \"bob\")) " // - + "}";// - - - String q23 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?a ?b ?c " // - + "{" // - + " ?a a geo:Feature . "// - + " ?b a geo:Point . "// - + " ?b geo:asWKT ?c . "// - + " FILTER(geof:sfWithin(?c, \"Polygon\")) " // - + "}";// - - - String q24 = "PREFIX fts: "// - + "SELECT ?f ?g " // - + "{" // - + " ?f ?g . "// - + " FILTER(fts:text(?g, \"bob\")) " // - + "}";// - - - String q25 = "PREFIX fts: "// - + "SELECT ?person ?commentmatch ?labelmatch ?point" // - + "{" // - + " ?person a ?point. " // - + " ?person a . "// - + " ?person ?labelmatch . "// - + " ?person ?commentmatch . "// - + " FILTER((?person > ?point) || (?person = ?labelmatch)). " - + " FILTER(fts:text(?labelmatch, \"bob\")) . " // - + " FILTER(fts:text(?commentmatch, \"bob\")) " // - + "}";// - - - String q26 = "PREFIX fts: "// - + "SELECT ?a ?b ?c " // - + "{" // - + " ?a a ?c. " // - + " ?a a . "// - + " ?a ?b . "// - + " FILTER((?a > ?c) || (?a = ?b)). " - + " FILTER(fts:text(?b, \"bob\")) . " // - + "}";// - - - - String q27 = "PREFIX fts: "// - + "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?person ?commentmatch ?labelmatch ?other ?feature ?point ?wkt ?g ?h" // - + "{" // - + " ?person a . "// - + " ?person ?labelmatch . "// - + " ?person ?commentmatch . "// - + " FILTER((?person > ?other) || (?person = ?labelmatch)). " - + " ?person a ?other. "// - + " FILTER(fts:text(?labelmatch, \"bob\")) . " // - + " FILTER(fts:text(?commentmatch, \"bob\")) " // - + " ?feature a geo:Feature . "// - + " ?point a geo:Point . 
"// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"Polygon\")) " // - + " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " // - + " ?h ?g. "// - + "}";// - - - String q28 = ""// - + "SELECT ?m ?n " // - + "{" // - + " FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " // - + " ?n ?m. "// - + "}";// - - - String q29 = ""// - + "SELECT ?m ?n ?o" // - + "{" // - + " FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " // - + " ?n ?m. "// - + " ?m a ?o." // - + " FILTER(ISNUMERIC(?o))." - + "}";// - - String q30 = ""// - + "SELECT ?pig ?dog ?owl" // - + "{" // - + " FILTER(?pig IN (1,2,3) && ?dog NOT IN(5,6,7)). " // - + " ?dog ?pig. "// - + " ?pig a ?owl. " // - + " FILTER(ISNUMERIC(?owl))." - + "}";// - - - String q31 = ""// - + "SELECT ?q ?r ?s " // - + "{" // - + " {?q a ?r} UNION {?r a ?s} ."// - + " ?r a ?s ."// - + "}";// - - - - String q33 = ""// - + "SELECT ?q ?r ?s ?t " // - + "{" // - + " OPTIONAL {?q a ?r} ."// - + " ?s a ?t ."// - + "}";// - - - String q34 = ""// - + "SELECT ?q ?r " // - + "{" // - + " FILTER(?q > ?r) ."// - + " ?q a ?r ."// - + "}";// - - - String q35 = "PREFIX fts: "// - + "SELECT ?s ?t ?u ?v ?w ?x ?y ?z " // - + "{" // - + " FILTER(?s > ?t)."// - + " ?s a ?t ."// - + " FILTER(?u > ?v)."// - + " ?u a ?v ."// - + " ?w ?x ."// - + " FILTER(fts:text(?x, \"bob\")) . " // - + " ?y ?z ."// - + " FILTER(fts:text(?z, \"bob\")) . " // - + "}";// - - - String q36 = "PREFIX fts: "// - + "SELECT ?dog ?cat " // - + "{" // - + " ?dog ?cat ."// - + " FILTER(fts:text(?cat, \"bob\")) . " // - + "}";// - - - String q37 = "PREFIX fts: "// - + "SELECT ?s ?t " // - + "{" // - + " FILTER(?s > ?t)."// - + " ?s a ?t ."// - + " FILTER(?s > ?t)."// - + " ?s a ?t ."// - + " FILTER(?s > ?t)."// - + " ?s a ?t ."// - + "}";// - - - - String q38 = "PREFIX fts: "// - + "SELECT ?s ?t " // - + "{" // - + " FILTER(?s > ?t)."// - + " ?s a ?t ."// - + " ?t ?s ."// - + " FILTER(?s > ?t)."// - + "}";// - - - - String q39 = "PREFIX fts: "// - + "SELECT ?s ?t " // - + "{" // - + " VALUES(?s) { ()()} ." // - + " ?t ." // - + " ?t ?s ."// - + "}";// - - String q40 = "PREFIX fts: "// - + "SELECT ?u ?v " // - + "{" // - + " ?v ." // - + " ?v ?u ."// - + "}";// - - String q41 = "PREFIX fts: "// - + "SELECT ?s ?t ?w ?x" // - + "{" // - + " FILTER(?s > ?t)."// - + " VALUES(?s) { ()()} ." // - + " VALUES(?w) { () () } ." // - + " ?t ." // - + " ?t ?s ."// - + " ?w ." // - + " ?w ?x ."// - + "}";// - - String q42 = "PREFIX fts: "// - + "SELECT ?u ?v " // - + "{" // - + " FILTER(?u > ?v)."// - + " ?v ." // - + " ?v ?u ."// - + "}";// - - String q43 = "PREFIX fts: "// - + "SELECT ?a ?b " // - + "{" // - + " ?b ." 
// - + " ?b ?a ."// - + "}";// - - - - - @Test - public void testVarRelableIndexSmaller() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(queryString, null); - ParsedQuery pq2 = parser2.parseQuery(indexSparqlString, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - - - Assert.assertTrue(qSet.containsAll(set) && set.size() != 0); - - } - - - - @Test - public void testVarRelableIndexSameSize() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q2, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - - Assert.assertTrue(set.equals(qSet)); - - - } - - - - - - @Test - public void testTwoIndexLargeQuery() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q11, null); - ParsedQuery pq2 = parser2.parseQuery(q7, null); - ParsedQuery pq3 = parser3.parseQuery(q12, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new 
Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(set.equals(qSet)); - - - } - - - - @Test - public void testThreeIndexLargeQuery() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q13, null); - ParsedQuery pq2 = parser2.parseQuery(q5, null); - ParsedQuery pq3 = parser3.parseQuery(q12, null); - ParsedQuery pq4 = parser4.parseQuery(q14, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(set.equals(qSet)); - - } - - - - - - - - - @Test - public void testSingleIndexLargeQuery() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q8, null); - ParsedQuery pq2 = parser2.parseQuery(q7, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - 
list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - - Assert.assertTrue(set.equals(qSet)); - - } - - - - - - - @Test - public void testContextFilter() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q15, null); - ParsedQuery pq2 = parser2.parseQuery(q16, null); - ParsedQuery pq3 = parser3.parseQuery(q17, null); - ParsedQuery pq4 = parser4.parseQuery(q18, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(qSet.containsAll(set) && eTupSet.size() == 1); - } - - - - - - @Test - public void testContextFilterFourIndex() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - SPARQLParser parser5 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq2 = parser2.parseQuery(q16, null); - ParsedQuery pq3 = parser3.parseQuery(q17, null); - ParsedQuery pq4 = parser4.parseQuery(q18, null); - ParsedQuery pq5 = parser5.parseQuery(q20, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + 
pq3.getTupleExpr()+ " , " +pq4.getTupleExpr()+ " , " +pq5.getTupleExpr()); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet(new Projection(pq5.getTupleExpr())); - - - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - list.add(extTup4); - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(qSet.containsAll(set) && eTupSet.size() == 2); - } - - - - - @Test - public void testGeoIndexFunction() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q21, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - - Assert.assertTrue(qSet.containsAll(set) && set.size() != 0); - - } - - - - @Test - public void testFreeTestIndexFunction() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q22, null); - ParsedQuery pq2 = parser2.parseQuery(q24, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - ExternalProcessor processor = new 
ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - - Assert.assertTrue(qSet.containsAll(set) && set.size() != 0); - - } - - - @Test - public void testThreeIndexGeoFreeCompareFilterMix() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q25, null); - ParsedQuery pq2 = parser2.parseQuery(q24, null); - ParsedQuery pq3 = parser3.parseQuery(q26, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 2); - - - } - - - - - - @Test - public void testFourIndexGeoFreeCompareFilterMix() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - SPARQLParser parser5 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q27, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - ParsedQuery pq3 = parser3.parseQuery(q26, null); - ParsedQuery pq4 = parser4.parseQuery(q24, null); - ParsedQuery pq5 = parser5.parseQuery(q28, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr()+ " and " + pq5.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new 
Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet(new Projection(pq5.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup4); - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Assert.assertTrue(eTupSet.size() == 4); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(set.equals(qSet)); - - - - } - - - - - - @Test - public void testThreeIndexGeoFreeCompareFilterMix2() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - - - ParsedQuery pq1 = parser1.parseQuery(q27, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - ParsedQuery pq3 = parser3.parseQuery(q26, null); - ParsedQuery pq4 = parser4.parseQuery(q28, null); - - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - - - - List list = new ArrayList(); - - list.add(extTup1); - list.add(extTup3); - list.add(extTup2); - - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Assert.assertTrue(eTupSet.size() == 3); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - Assert.assertTrue(qSet.containsAll(set)); - - - } - - - - - - - - @Test - public void testISNUMERIC() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q29, null); - ParsedQuery pq2 = parser2.parseQuery(q30, 
null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 1); - - - } - - - @Test - public void testInvalidQueryUnion() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q31, null); - ParsedQuery pq2 = parser2.parseQuery(q31, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - boolean thrown = false; - - try { - ExternalProcessor processor = new ExternalProcessor(list); - processor.process(pq1.getTupleExpr()); - } catch (IllegalArgumentException e) { - System.out.println(e); - thrown = true; - } - - Assert.assertTrue(thrown); - - } - - - - - - @Test - public void testInvalidQueryOptional() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q33, null); - ParsedQuery pq2 = parser2.parseQuery(q33, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - boolean thrown = false; - - try { - ExternalProcessor processor = new ExternalProcessor(list); - processor.process(pq1.getTupleExpr()); - } catch (IllegalArgumentException e) { - System.out.println(e); - thrown = true; - } - - Assert.assertTrue(thrown); - - } - - - - - @Test - public void testTwoRepeatedIndex() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q35, null); - ParsedQuery pq2 = parser2.parseQuery(q34, null); - ParsedQuery pq3 = parser3.parseQuery(q36, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - - List list = new ArrayList(); - 
list.add(extTup1); - list.add(extTup2); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for(QueryModelNode s: eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for(StatementPattern t: tempSet) { - set.add(t); - } - - } - - - Assert.assertTrue(set.equals(qSet) && eTupSet.size()==4); - - - } - - - - @Test - public void testRepeatedStatementPatternQuery() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q37, null); - ParsedQuery pq2 = parser2.parseQuery(q34, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - boolean thrown = false; - - try { - ExternalProcessor processor = new ExternalProcessor(list); - processor.process(pq1.getTupleExpr()); - } catch (IllegalArgumentException e) { - System.out.println(e); - thrown = true; - } - - Assert.assertTrue(thrown); - } - - - - - - - - - @Test - public void testRepeatedFilterQuery() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q38, null); - ParsedQuery pq2 = parser2.parseQuery(q38, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - boolean thrown = false; - - try { - ExternalProcessor processor = new ExternalProcessor(list); - processor.process(pq1.getTupleExpr()); - } catch (IllegalArgumentException e) { - System.out.println(e); - thrown = true; - } - - Assert.assertTrue(thrown); - } - - - - - @Test - public void testBindingSetAssignment1() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q39, null); - ParsedQuery pq2 = parser2.parseQuery(q40, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - List list = new ArrayList(); - - list.add(extTup1); - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for (QueryModelNode s : eTupSet) { - 
StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for (StatementPattern t : tempSet) { - set.add(t); - } - - Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 1); - - BindingSetAssignmentCollector bsac1 = new BindingSetAssignmentCollector(); - BindingSetAssignmentCollector bsac2 = new BindingSetAssignmentCollector(); - pq1.getTupleExpr().visit(bsac1); - tup.visit(bsac2); - - Assert.assertTrue(bsac1.getBindingSetAssignments().equals(bsac2.getBindingSetAssignments())); - - } - } - - - @Test - public void testBindingSetAssignment2() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q41, null); - ParsedQuery pq2 = parser2.parseQuery(q42, null); - ParsedQuery pq3 = parser2.parseQuery(q43, null); - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - - ExternalProcessor processor = new ExternalProcessor(list); - TupleExpr tup = processor.process(pq1.getTupleExpr()); - System.out.println("Processed query is " + tup); - - ExternalTupleVstor visitor = new ExternalTupleVstor(); - tup.visit(visitor); - - StatementPatternCollector spc = new StatementPatternCollector(); - pq1.getTupleExpr().visit(spc); - Set qSet = Sets.newHashSet(spc.getStatementPatterns()); - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - for (QueryModelNode s : eTupSet) { - StatementPatternCollector spc1 = new StatementPatternCollector(); - ((ExternalTupleSet) s).getTupleExpr().visit(spc1); - Set tempSet = Sets.newHashSet(spc1.getStatementPatterns()); - for (StatementPattern t : tempSet) { - set.add(t); - } - } - - Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 2); - - BindingSetAssignmentCollector bsac1 = new BindingSetAssignmentCollector(); - BindingSetAssignmentCollector bsac2 = new BindingSetAssignmentCollector(); - pq1.getTupleExpr().visit(bsac1); - tup.visit(bsac2); - - Assert.assertTrue(bsac1.getBindingSetAssignments().equals(bsac2.getBindingSetAssignments())); - - } - - - - public static class ExternalTupleVstor extends QueryModelVisitorBase { - - private Set eSet = new HashSet(); - - @Override - public void meetNode(QueryModelNode node) throws RuntimeException { - if (node instanceof ExternalTupleSet) { - eSet.add(node); - } - super.meetNode(node); - } - - public Set getExtTup() { - return eSet; - } - - } - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java deleted file mode 100644 index aec959e16..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java +++ /dev/null @@ -1,965 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.List; -import java.util.Set; - -import mvm.rya.indexing.external.QueryVariableNormalizer; - -import org.junit.Assert; -import org.junit.Test; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Sets; - - - -public class QueryVariableNormalizerTest { - - private String q1 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?e a ?c . "// - + " ?c ?l. "// - + " ?l ?e . "// - + "}";// - - private String q2 = ""// - + "SELECT ?a ?t ?v " // - + "{" // - + " ?a a ?t . "// - + " ?t ?v . "// - + " ?v ?a . "// - + "}";// - - private String q3 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?d . "// - + " ?f ?m "// - + "}";// - - private String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t . "// - + " ?t ?u "// - + "}";// - - private String q5 = ""// - + "SELECT ?f ?m ?d ?s " // - + "{" // - + " ?m a ?d . "// - + " ?f a ?m . "// - + " ?f a ?s . "// - + " ?m ?f . "// - + " ?s ?m "// - + "}";// - - private String q6 = ""// - + "SELECT ?q ?r ?s ?t ?u " // - + "{" // - + " ?q a ?r ."// - + " ?r a ?s ."// - + " ?t a ?u ."// - + "}";// - - private String q7 = ""// - + "SELECT ?s ?t ?u ?x ?y ?z " // - + "{" // - + " ?s a ?t ."// - + " ?x a ?y ."// - + " ?t ?u ."// - + " ?y ?z ."// - + "}";// - - private String q8 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?f ?m . "// - + " ?m ?a . "// - + " ?o ?r . "// - + "}";// - - private String q9 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - private String q10 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - private String q11 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?x ?y ?z" // - + "{" // - + " ?f a ?m ."// - + " ?m a ?d ."// - + " ?d a ?e ."// - + " ?e a ?l ."// - + " ?l a ?c ."// - + " ?x a ?y ."// - + " ?y a ?z ."// - + " ?z a ?x ."// - + "}";// - - private String q12 = ""// - + "SELECT ?s ?t ?u ?v " // - + "{" // - + " \"hello\" ?s ?t ."// - + " ?t a ?u ."// - + " ?u ?v \"m\" . "// - + "}";// - - private String q13 = ""// - + "SELECT ?x ?y ?z ?w " // - + "{" // - + " \"hello\" ?x ?y ."// - + " ?y a ?z ."// - + " ?z ?w \"m\" . "// - + "}";// - - private String q14 = ""// - + "SELECT ?e ?l ?c " // - + "{" // - + " ?c a ?l . "// - + " ?l ?e. "// - + " ?e ?c . 
"// - + "}";// - - String q15 = ""// - + "SELECT ?x ?y ?z ?w " // - + "{" // - + " ?x ?y ?z ."// - + " ?y ?z ?w ."// - + "}";// - - String q16 = ""// - + "SELECT ?a ?b ?c " // - + "{" // - + " ?a ?b ?c ."// - + "}";// - - String q17 = ""// - + "SELECT ?q ?r " // - + "{" // - + " ?q ?r \"url:\" ."// - + "}";// - - private String q18 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?h ?r ."// - + " ?f a ?m ."// - + " ?p ?n . "// - + " ?e a ?l ."// - + " ?o ?p ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?r ?a . "// - + "}";// - - String q23 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " GRAPH ?x { " // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?x a ?f. "// - + " }"// - + "}";// - - String q22 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " GRAPH ?y { " // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " ?y a ?f . "// - + " }"// - + "}";// - - String q19 = ""// - + "SELECT ?r ?s ?t " // - + "{" // - + " GRAPH ?u { " // - + " ?r a ?s ."// - + " ?s ?t ."// - + " ?t ?r . "// - + " ?u a ?r . "// - + " }"// - + "}";// - - String q20 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " GRAPH { " // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " }"// - + "}";// - - String q21 = ""// - + "SELECT ?r ?s ?t " // - + "{" // - + " GRAPH { " // - + " ?r a ?s ."// - + " ?s ?t ."// - + " ?t ?r . "// - + " }"// - + "}";// - - private String q24 = ""// - + "SELECT ?e ?l ?c ?x ?y ?z " // - + "{" // - + " GRAPH ?d { " // - + " ?c a ?l . "// - + " ?l ?e. "// - + " ?e ?c . "// - + " ?x a ?y . "// - + " ?y ?z. "// - + " ?z ?x . "// - + "}" // - + "}";// - - String q25 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " GRAPH ?w { " // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + " }"// - + "}";// - - private String q26 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?x ?y ?z" // - + "{" // - + " GRAPH ?w { " // - + " ?f a ?m ."// - + " ?m a ?d ."// - + " ?d a ?e ."// - + " ?e a ?l ."// - + " ?l a ?c ."// - + " ?x a ?y ."// - + " ?y a ?z ."// - + " ?z a ?x ."// - + " }"// - + "}";// - - private String q27 = ""// - + "SELECT ?q ?r ?s ?t ?u " // - + "{" // - + " GRAPH ?n { " // - + " ?q a ?r ."// - + " ?r a ?s ."// - + " ?t a ?u ."// - + " }"// - + "}";// - - - - - String q30 = ""// - + "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d ?e . "// - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " // - + " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " // - + " ?x ?g. "// - + " ?b a ?q ."// - + " }"// - + "}";// - - - String q31 = ""// - + "SELECT ?m ?n " // - + "{" // - + " GRAPH ?q { " // - + " FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " // - + " ?q ?m. "// - + " }"// - + "}";// - - - String q32 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"Polygon\")) " // - + "}";// - - - String q33 = "PREFIX fts: "// - + "SELECT ?person ?commentmatch ?labelmatch" // - + "{" // - + " ?person a . "// - + " ?person ?labelmatch . "// - + " ?person ?commentmatch . "// - + " FILTER(fts:text(?labelmatch, \"bob\")) . 
" // - + " FILTER(fts:text(?commentmatch, \"bob\")) " // - + "}";// - - - String q34 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?a ?b ?c " // - + "{" // - + " ?a a geo:Feature . "// - + " ?b a geo:Point . "// - + " ?b geo:asWKT ?c . "// - + " FILTER(geof:sfWithin(?c, \"Polygon\")) " // - + "}";// - - - String q35 = "PREFIX fts: "// - + "SELECT ?a ?b " // - + "{" // - + " ?a ?b . "// - + " FILTER(fts:text(?b, \"bob\")) " // - + "}";// - - - - - - - - - - - - /** - * @param tuple1 - * @param tuple2 - * @return - * @throws Exception - */ - public boolean tupleEquals(TupleExpr tuple1, TupleExpr tuple2) throws Exception { - - Set spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1)); - Set spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2)); - - return spSet1.equals(spSet2); - - } - - /** - * @param tuple1 - * @param tuple2 - * @return - * @throws Exception - */ - public boolean isTupleSubset(TupleExpr tuple1, TupleExpr tuple2) throws Exception { - - Set spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1)); - Set spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2)); - - return (Sets.intersection(spSet1, spSet2).equals(spSet2)); - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on the queries q1,q2 - * which are the same up to a relabeling of variables. - */ - @Test - public void testEqThreeDiffVars() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q2, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(1, normalize.size()); - - for (TupleExpr s : normalize) { - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on queries q1 and q14 - * which are the same up to the permutation of their variables. - */ - @Test - public void testEqThreePermuteVars() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q14, null); - - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(1, normalize.size()); - - for (TupleExpr s : normalize) { - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on the queries q12 and - * q13, which are the same up to a relabeling of the variables, - * but have StatementPatterns whose constants are not - * predicates. - */ - @Test - public void testEqPredNotConst() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q12, null); - ParsedQuery pq2 = parser2.parseQuery(q13, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(1, normalize.size()); - - for (TupleExpr s : normalize) { - // System.out.println(s); - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on the large query q9 - * with with a smaller, potential index q10 to see if the - * correct number of outputs are produced. 
- */ - @Test - public void testEqLargeEx() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q9, null); - ParsedQuery pq2 = parser2.parseQuery(q10, null); - - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(4, normalize.size()); - - for (TupleExpr s : normalize) { - List testList = QueryVariableNormalizer.getNormalizedIndex(pq2.getTupleExpr(), s); - Assert.assertEquals(1, testList.size()); - for (TupleExpr t : testList) { - Assert.assertTrue(t.equals(pq2.getTupleExpr())); - } - } - - SPARQLParser parser3 = new SPARQLParser(); - ParsedQuery pq3 = parser3.parseQuery(q7, null); - List normalize2 = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq3.getTupleExpr()); - - Assert.assertEquals(12, normalize2.size()); - for (TupleExpr s : normalize2) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s)); - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext to see if it recognizes - * that no substitution exists for two moderate, similar queries - * q5 and q1 that are structurally different - */ - @Test - public void testEqNEQ() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q5, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - pq1 = parser1.parseQuery(q5, null); - pq2 = parser2.parseQuery(q1, null); - - List normalize2 = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(1, normalize2.size()); - - for (TupleExpr s : normalize2) { - List testList = QueryVariableNormalizer.getNormalizedIndex(pq2.getTupleExpr(), s); - Assert.assertEquals(1, testList.size()); - for (TupleExpr t : testList) { - Assert.assertTrue(t.equals(pq2.getTupleExpr())); - } - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext to see if it recognizes - * that no substitution exists for two small, similar queries q3 - * and q4 that are structurally different - */ - @Test - public void testNeq1() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q3, null); - ParsedQuery pq2 = parser2.parseQuery(q4, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext to see if it recognizes - * that no substitution exists for the variables of q8 given - * that it has more variables than q1 - */ - @Test - public void testNeq2() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q8, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext to see if it recognizes - * that no substitution exists for the large queries q8 and q9 - * which contain the same number of 
variables and are similar. - */ - @Test - public void testNeq() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q9, null); - ParsedQuery pq2 = parser2.parseQuery(q8, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on the large query q11 - * and q6, which have many similar nodes, to see if the correct - * number of outputs are produced. - */ - @Test - public void testLargeNeq() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q11, null); - ParsedQuery pq2 = parser2.parseQuery(q6, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 33); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s)); - } - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext with two queries whose - * StatementPattern nodes contain no constant Vars. - */ - @Test - public void testNoConstants() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q15, null); - ParsedQuery pq2 = parser2.parseQuery(q16, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 2); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s)); - } - - pq1 = parser1.parseQuery(q16, null); - pq2 = parser2.parseQuery(q17, null); - normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext with same query passed - * in as query and index. Tests that only one index is produced - * and that it equals original query. - */ - @Test - public void testSameTuples() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q11, null); - ParsedQuery pq2 = parser2.parseQuery(q11, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - Assert.assertTrue(normalize.get(0).equals(pq1.getTupleExpr()) && normalize.get(0).equals(pq2.getTupleExpr())); - - } - - - - /** - * @throws Exception - * Tests QueryVariable normalizer on queries q9 and q18, where - * q18 is obtained from q9 by reordering lines. 
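The order-insensitivity asserted here comes down to comparing the two trees as sets of statement patterns, which is all the tupleEquals and isTupleSubset helpers above do. A self-contained restatement, with the subset direction spelled out (Set.equals ignores ordering, so reordered queries compare equal):

    import java.util.Set;

    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.TupleExpr;
    import org.openrdf.query.algebra.helpers.StatementPatternCollector;

    import com.google.common.collect.Sets;

    final class TupleComparisons {

        // Equal up to reordering: both trees contain exactly the same
        // statement patterns, regardless of where they appear in the body.
        static boolean sameUpToOrder(TupleExpr t1, TupleExpr t2) {
            return patterns(t1).equals(patterns(t2));
        }

        // Mirrors isTupleSubset above: every pattern of t2 also occurs in t1
        // (the intersection gives back all of t2's patterns).
        static boolean contains(TupleExpr t1, TupleExpr t2) {
            return Sets.intersection(patterns(t1), patterns(t2)).equals(patterns(t2));
        }

        private static Set<StatementPattern> patterns(TupleExpr t) {
            return Sets.newHashSet(StatementPatternCollector.process(t));
        }
    }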
- */ - @Test - public void testOrderEq() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q9, null); - ParsedQuery pq2 = parser2.parseQuery(q18, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 24); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(s, pq1.getTupleExpr())&&isTupleSubset(pq1.getTupleExpr(),s)); - } - - } - - @Test - public void testSimpleVarGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q22, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - } - - } - - @Test - public void testVarGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq2 = parser2.parseQuery(q22, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - } - - } - - @Test - public void tesVarConstantGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq2 = parser2.parseQuery(q20, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - @Test - public void testConstantGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q20, null); - ParsedQuery pq2 = parser2.parseQuery(q21, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr())); - } - - } - - - @Test - public void testMedVarGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q24, null); - ParsedQuery pq2 = parser2.parseQuery(q25, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 2); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(),s)); - } - - } - - - @Test - public void tesGraphVarInBody() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq2 = parser2.parseQuery(q25, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - - } - - - @Test - public void tesLargeVarGraph() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - 
SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q26, null); - ParsedQuery pq2 = parser2.parseQuery(q27, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 33); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(),s)); - } - - - } - - - @Test - public void testFilters1() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q30, null); - ParsedQuery pq2 = parser2.parseQuery(q31, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(),s)); - } - - - } - - - - @Test - public void testFilters2() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q32, null); - ParsedQuery pq2 = parser2.parseQuery(q34, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(),s)); - } - - - } - - - - @Test - public void testFilters3() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q33, null); - ParsedQuery pq2 = parser2.parseQuery(q35, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 1); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(),s)); - } - - - } - - - - - - - - - - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java deleted file mode 100644 index 6449486eb..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java +++ /dev/null @@ -1,490 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - -import mvm.rya.indexing.external.ExternalProcessor; -import mvm.rya.indexing.external.tupleSet.ExternalProcessorTest.ExternalTupleVstor; - -import org.junit.Assert; -import org.junit.Test; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Sets; - -public class VarConstExternalProcessorTest { - - - - - String q15 = ""// - + "SELECT ?a ?b ?c ?d ?e ?f ?q " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d ?e . "// - + " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " // - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " ?b a ?q ."// - + " }"// - + "}";// - - - - - String q17 = ""// - + "SELECT ?j ?k ?l ?m ?n ?o " // - + "{" // - + " GRAPH ?z { " // - + " ?l a ?m. " // - + " ?n a ?o. " // - + " ?j ?k . "// - + " FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " // - + " }"// - + "}";// - - String q18 = ""// - + "SELECT ?r ?s ?t ?u " // - + "{" // - + " GRAPH ?q { " // - + " FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " // - + " ?t a ?u ."// - + " ?s a ?r ."// - + " }"// - + "}";// - - - - String q19 = ""// - + "SELECT ?a ?c ?d ?f ?q " // - + "{" // - + " GRAPH ?x { " // - + " ?f a ?a ."// - + " \"3\" a ?c . "// - + " ?d \"5\" . "// - + " FILTER ( \"5\" < ?f && (?a > \"3\" || ?c = ?d) ). " // - + " FILTER(bound(?f) && sameTerm(?a,\"3\") && bound(?q)). " // - + " \"3\" a ?q ."// - + " ?a a ?f ."// - + " }"// - + "}";// - - - - - - - String q21 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT \"wkt\" . "// - + " FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " // - + "}";// - - - String q22 = "PREFIX fts: "// - + "SELECT ?person " // - + "{" // - + " ?person a . "// - + " ?person \"sally\" . "// - + " ?person \"john\" . "// - + " FILTER(fts:text(\"sally\", \"bob\")) . " // - + " FILTER(fts:text(\"john\", \"harry\")) " // - + " ?person \"bob\". "// - + " ?person \"harry\". "// - + "}";// - - - String q23 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?a ?b ?c ?d " // - + "{" // - + " ?a a geo:Feature . "// - + " ?b a geo:Point . "// - + " ?b geo:asWKT ?c . "// - + " FILTER(geof:sfWithin(?c, ?d)) " // - + "}";// - - - String q24 = "PREFIX fts: "// - + "SELECT ?f ?g ?h" // - + "{" // - + " ?f ?g . "// - + " FILTER(fts:text(?g,?h)). " // - + " ?f ?h. " // - + "}";// - - - String q25 = "PREFIX fts: "// - + "SELECT ?person ?point" // - + "{" // - + " ?person \"label\" . "// - + " FILTER(fts:text(\"label\", \"bob\")) . " // - + " ?person \"bob\" . " // - + " ?person a ?point. " // - + " \"bob\" a . "// - + " ?person \"comment\" . "// - + " FILTER((?person > ?point) || (?person = \"comment\")). " - + " FILTER(fts:text(\"comment\", \"bob\")) " // - + "}";// - - - String q26 = "PREFIX fts: "// - + "SELECT ?a ?b ?c ?d " // - + "{" // - + " ?a a ?c. " // - + " ?d a . "// - + " ?a ?b . "// - + " FILTER((?a > ?c) || (?a = ?b)). " - + " FILTER(fts:text(?b, ?d)) . 
" // - + "}";// - - - - String q27 = "PREFIX fts: "// - + "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?person ?feature ?point " // - + "{" // - + " ?person \"label\" . "// - + " FILTER(fts:text(\"label\", \"bob\")) . " // - + " ?person \"bob\" . " // - + " ?person a ?point. " // - + " \"bob\" a . "// - + " ?person \"comment\" . "// - + " FILTER((?person > ?point) || (?person = \"comment\")). " - + " FILTER(fts:text(\"comment\", \"bob\")) " // - + " ?feature a geo:Feature . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT \"wkt\" . "// - + " FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " // - + "}";// - - - - - String q28 = ""// - + "SELECT ?m ?n " // - + "{" // - + " FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " // - + " ?n ?m. "// - + "}";// - - - - - - - - @Test - public void testContextFilterFourIndex() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq3 = parser3.parseQuery(q17, null); - ParsedQuery pq4 = parser4.parseQuery(q18, null); - - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr()); - - - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr())); - - - List list = new ArrayList(); - - list.add(extTup3); - list.add(extTup2); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - Set qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr())); - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - - Assert.assertEquals(2, eTupSet.size()); - - Set set = Sets.newHashSet(); - - for (QueryModelNode s : eTupSet) { - Set tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s) - .getTupleExpr())); - set.addAll(tempSet); - - } - - - Assert.assertTrue(qSet.containsAll(set)); - } - - - - - @Test - public void testGeoIndexFunction() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q21, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - Set qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr())); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - - Set set = Sets.newHashSet(); - - Assert.assertEquals(1, eTupSet.size()); - - for (QueryModelNode s : eTupSet) { - Set tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s) - .getTupleExpr())); - set.addAll(tempSet); - - } - - - - Assert.assertTrue(qSet.containsAll(set)); - - } - - - - @Test - public void testFreeTestIndexFunction() throws Exception { - - 
SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q22, null); - ParsedQuery pq2 = parser2.parseQuery(q24, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Index is " + pq2.getTupleExpr()); - - - SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - - - List list = new ArrayList(); - list.add(extTup); - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - Set qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr())); - - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - - Set set = Sets.newHashSet(); - - Assert.assertEquals(2, eTupSet.size()); - - for (QueryModelNode s : eTupSet) { - Set tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s) - .getTupleExpr())); - set.addAll(tempSet); - - } - - - Assert.assertTrue(qSet.containsAll(set)); - - } - - - @Test - public void testThreeIndexGeoFreeCompareFilterMix() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q25, null); - ParsedQuery pq2 = parser2.parseQuery(q24, null); - ParsedQuery pq3 = parser3.parseQuery(q26, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - - List list = new ArrayList(); - list.add(extTup1); - list.add(extTup2); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - Set qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr())); - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - - Assert.assertEquals(2, eTupSet.size()); - - for (QueryModelNode s : eTupSet) { - Set tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s) - .getTupleExpr())); - set.addAll(tempSet); - - } - - - Assert.assertTrue(qSet.containsAll(set)); - - } - - - - - - @Test - public void testFourIndexGeoFreeCompareFilterMix() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - SPARQLParser parser3 = new SPARQLParser(); - SPARQLParser parser4 = new SPARQLParser(); - - - ParsedQuery pq1 = parser1.parseQuery(q27, null); - ParsedQuery pq2 = parser2.parseQuery(q23, null); - ParsedQuery pq3 = parser3.parseQuery(q26, null); - ParsedQuery pq4 = parser4.parseQuery(q24, null); - - System.out.println("Query is " + pq1.getTupleExpr()); - System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr()); - - - SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr())); - SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr())); - SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new 
Projection(pq4.getTupleExpr())); - - - - List list = new ArrayList(); - - list.add(extTup1); - list.add(extTup2); - list.add(extTup3); - - - ExternalProcessor processor = new ExternalProcessor(list); - - TupleExpr tup = processor.process(pq1.getTupleExpr()); - - System.out.println("Processed query is " + tup); - - Set qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr())); - - ExternalTupleVstor eTup = new ExternalTupleVstor(); - tup.visit(eTup); - Set eTupSet = eTup.getExtTup(); - Set set = Sets.newHashSet(); - - Assert.assertEquals(3, eTupSet.size()); - - for (QueryModelNode s : eTupSet) { - Set tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s) - .getTupleExpr())); - set.addAll(tempSet); - - } - - - Assert.assertTrue(qSet.containsAll(set)); - - - - } - - - - - - - -} diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java deleted file mode 100644 index 002a0e142..000000000 --- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java +++ /dev/null @@ -1,747 +0,0 @@ -package mvm.rya.indexing.external.tupleSet; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.List; -import java.util.Set; - -import mvm.rya.indexing.external.QueryVariableNormalizer; - -import org.junit.Assert; -import org.junit.Test; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public class VarConstQueryVariableNormalizerTest { - - private String query1 = " " // - + "SELECT ?person ?address ?otherValue" // - + "{" // - + "?person a . " // - + "?person ."// - + "?person ?address." // - + "?person ?otherValue" // - + "}"; // - - private String index1 = " " // - + "SELECT ?X ?Y ?Z ?W" // - + "{"// - + "?X a . " // - + "?X ?Y."// - + "?X ?Z." // - + "?X ?W" // - + "}"; // - - - - private String q4 = ""// - + "SELECT ?s ?t ?u " // - + "{" // - + " ?s a ?t . 
"// - + " ?t ?u "// - + "}";// - - - - private String q7 = ""// - + "SELECT ?s ?t ?u ?x ?y ?z " // - + "{" // - + " ?s a ?t ."// - + " ?x a ?y ."// - + " ?t ?u ."// - + " ?y ?z ."// - + "}";// - - private String q8 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ?h ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?h ?r ."// - + " ?f ?m . "// - + " ?m ?a . "// - + " ?o ?r . "// - + "}";// - - private String q9 = ""// - + "SELECT ?f ?d ?e ?c ?n ?p ?a ?r " // - + "{" // - + " ?f a ."// - + " ?e a ."// - + " ?n a ."// - + " ?a a ."// - + " ?d ."// - + " ?c ."// - + " ?p ."// - + " ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - private String q10 = ""// - + "SELECT ?f ?m ?d " // - + "{" // - + " ?f a ?m ."// - + " ?m ?d ."// - + " ?d ?f . "// - + "}";// - - String q15 = ""// - + "SELECT ?x ?y ?z ?w " // - + "{" // - + " ?x ?y ?z ."// - + " ?y ?z ?w ."// - + "}";// - - String q16 = ""// - + "SELECT ?a ?b ?c " // - + "{" // - + " ?a ?b ?c ."// - + "}";// - - String q17 = ""// - + "SELECT ?q ?r " // - + "{" // - + " ?q ?r \"url:\" ."// - + "}";// - - private String q18 = ""// - + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?r " // - + "{" // - + " ?f a ?m ."// - + " ?e a ?l ."// - + " ?n a ?o ."// - + " ?a a ."// - + " ?m ?d ."// - + " ?l ?c ."// - + " ?o ?p ."// - + " ?r ."// - + " ?d ?f . "// - + " ?c ?e . "// - + " ?p ?n . "// - + " ?r ?a . "// - + "}";// - - - String q32 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT \"wkt\" . "// - + " FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " // - + "}";// - - - String q33 = "PREFIX fts: "// - + "SELECT ?person ?commentmatch ?labelmatch" // - + "{" // - + " ?person a . "// - + " ?person ?labelmatch . "// - + " ?person ?commentmatch . "// - + " FILTER(fts:text(?labelmatch, \"sally\")) . " // - + " FILTER(fts:text(?commentmatch, \"bob\")) " // - + "}";// - - - String q34 = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?a ?b ?c ?d" // - + "{" // - + " ?a a geo:Feature . "// - + " ?b a geo:Point . "// - + " ?b geo:asWKT ?c . "// - + " FILTER(geof:sfWithin(?c, ?d)) " // - + "}";// - - - String q35 = "PREFIX fts: "// - + "SELECT ?a ?b ?c" // - + "{" // - + " ?a ?b . "// - + " FILTER(fts:text(?b, ?c)) " // - + "}";// - - - - - - - - - /** - * @param tuple1 - * @param tuple2 - * @return - * @throws Exception - */ - public boolean tupleEquals(TupleExpr tuple1, TupleExpr tuple2) throws Exception { - - Set spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1)); - Set spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2)); - - return spSet1.equals(spSet2); - - } - - /** - * @param tuple1 - * @param tuple2 - * @return - * @throws Exception - */ - public boolean isTupleSubset(TupleExpr tuple1, TupleExpr tuple2) throws Exception { - - Set spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1)); - Set spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2)); - - return (Sets.intersection(spSet1, spSet2).equals(spSet2)); - - } - - - - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext with two queries whose - * StatementPattern nodes contain no constant Vars. 
- */ - @Test - public void testNoConstants() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q15, null); - ParsedQuery pq2 = parser2.parseQuery(q16, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(2,normalize.size()); - for (TupleExpr s : normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s)); - } - - pq1 = parser1.parseQuery(q16, null); - pq2 = parser2.parseQuery(q17, null); - normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), pq2.getTupleExpr()); - - Assert.assertTrue(normalize.size() == 0); - - } - - - - - - @Test - public void queryConstantNodeOneMatch() throws Exception { - - SPARQLParser p = new SPARQLParser(); - - ParsedQuery pq1 = p.parseQuery(query1, null); - ParsedQuery pq2 = p.parseQuery(index1, null); - - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - Assert.assertEquals(1, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - } - - - - /** - * @throws Exception - * Tests QueryVariableNormalizerContext on the large query q9 - * with with a smaller, potential index q10 to see if the - * correct number of outputs are produced. - */ - @Test - public void querConstNodeFourMatch() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q9, null); - ParsedQuery pq2 = parser2.parseQuery(q10, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - //System.out.println(normalize); - - Assert.assertEquals(4, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - - - - } - - - @Test - public void queryConstNodeSixMatch() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q9, null); - ParsedQuery pq2 = parser2.parseQuery(q18, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - Assert.assertEquals(6, normalize.size()); - - //System.out.println("tuple expr is " +pq1.getTupleExpr() + " and normalized tuples are " + normalize); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - - } - - - - @Test - public void queryConstGeoFilter() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q32, null); - ParsedQuery pq2 = parser2.parseQuery(q34, null); - - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - - Assert.assertEquals(1, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - - - FilterCollector fc1 = new FilterCollector(); - pq1.getTupleExpr().visit(fc1); - List fList1 = fc1.getFilters(); - - for(TupleExpr te: normalize) { - FilterCollector fc2 = new FilterCollector(); - te.visit(fc2); - List fList2 = fc2.getFilters(); - - for(QueryModelNode q: fList2) { - Assert.assertTrue(fList1.contains(q)); - } - } - - - } - - - @Test - public void queryConstFreeTextFilter() throws 
Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q33, null); - ParsedQuery pq2 = parser2.parseQuery(q35, null); - - System.out.println(pq1.getTupleExpr()); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - - Assert.assertEquals(2, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - - - FilterCollector fc1 = new FilterCollector(); - pq1.getTupleExpr().visit(fc1); - List fList1 = fc1.getFilters(); - - for(TupleExpr te: normalize) { - FilterCollector fc2 = new FilterCollector(); - te.visit(fc2); - List fList2 = fc2.getFilters(); - - for(QueryModelNode q: fList2) { - Assert.assertTrue(fList1.contains(q)); - } - } - - - - - } - - - - - - @Test - public void queryConstNodeTwoMatch() throws Exception { - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q7, null); - ParsedQuery pq2 = parser2.parseQuery(q4, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - Assert.assertEquals(2, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - - - - - } - - - - - - - - - @Test - public void queryNAryListMatch() throws Exception { - - - - String q1 = ""// - + "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d ?e . "// - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " // - + " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " // - + " ?x ?g. "// - + " ?b a ?q ."// - + " }"// - + "}";// - - - String q2 = ""// - + "SELECT ?m ?n ?r ?y " // - + "{" // - + " GRAPH ?q { " // - + " FILTER(?m IN (1,?y,3) && ?n NOT IN(?r,6,7)). " // - + " ?q ?m. "// - + " }"// - + "}";// - - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q1, null); - ParsedQuery pq2 = parser2.parseQuery(q2, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - Assert.assertEquals(1, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - FilterCollector fc1 = new FilterCollector(); - pq1.getTupleExpr().visit(fc1); - List fList1 = fc1.getFilters(); - - for(TupleExpr te: normalize) { - FilterCollector fc2 = new FilterCollector(); - te.visit(fc2); - List fList2 = fc2.getFilters(); - - for(QueryModelNode q: fList2) { - Assert.assertTrue(fList1.contains(q)); - } - } - - - - } - - - - - - - @Test - public void queryCompoundFilterMatch() throws Exception { - - - - String q17 = ""// - + "SELECT ?j ?k ?l ?m ?n ?o " // - + "{" // - + " GRAPH ?z { " // - + " ?j ?k . "// - + " FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " // - + " }"// - + "}";// - -// String q18 = ""// -// + "SELECT ?r ?s ?t ?u " // -// + "{" // -// + " GRAPH ?q { " // -// + " FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " // -// + " ?t a ?u ."// -// + " }"// -// + "}";// - - - - String q19 = ""// - + "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " // - + "{" // - + " GRAPH ?x { " // - + " ?a a ?b ."// - + " ?b ?c ."// - + " ?d \"5\" . "// - + " FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). 
" // - + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // - + " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " // - + " ?h ?g. "// - + " ?b a ?q ."// - + " }"// - + "}";// - - -// String q20 = ""// -// + "SELECT ?m ?n ?o " // -// + "{" // -// + " GRAPH ?q { " // -// + " FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " // -// + " ?n ?m. "// -// + " }"// -// + "}";// - - - - - SPARQLParser parser1 = new SPARQLParser(); - SPARQLParser parser2 = new SPARQLParser(); - - ParsedQuery pq1 = parser1.parseQuery(q19, null); - ParsedQuery pq2 = parser2.parseQuery(q17, null); - - List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), - pq2.getTupleExpr()); - - - - System.out.println(normalize); - - Assert.assertEquals(1, normalize.size()); - - for(TupleExpr te: normalize) { - Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); - } - - FilterCollector fc1 = new FilterCollector(); - pq1.getTupleExpr().visit(fc1); - List fList1 = fc1.getFilters(); - - for(TupleExpr te: normalize) { - FilterCollector fc2 = new FilterCollector(); - te.visit(fc2); - List fList2 = fc2.getFilters(); - - for(QueryModelNode q: fList2) { - Assert.assertTrue(fList1.contains(q)); - } - } - - - - } - - - - - -// @Test -// public void queryCompoundFilterMatch2() throws Exception { -// -// -// -// -// -// -// String q19 = ""// -// + "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " // -// + "{" // -// + " GRAPH ?x { " // -// + " ?a a ?b ."// -// + " ?b ?c ."// -// + " ?d \"5\" . "// -// + " FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). " // -// + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " // -// + " FILTER(?g IN (1,5,3) && ?h NOT IN(5,6,7)). " // -// + " ?h ?g. "// -// + " ?b a ?q ."// -// + " }"// -// + "}";// -// -// -// String q20 = ""// -// + "SELECT ?m ?n ?o ?f ?a ?b ?c ?d " // -// + "{" // -// + " GRAPH ?q { " // -// + " ?d ?o . "// -// + " FILTER ( ?o < ?f && (?a > ?b || ?c = ?d) ). " // -// + " FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " // -// + " ?n ?m. 
"// -// + " }"// -// + "}";// -// -// -// -// -// SPARQLParser parser1 = new SPARQLParser(); -// SPARQLParser parser2 = new SPARQLParser(); -// -// ParsedQuery pq1 = parser1.parseQuery(q19, null); -// ParsedQuery pq2 = parser2.parseQuery(q20, null); -// -// List normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), -// pq2.getTupleExpr()); -// -// -// -// System.out.println(normalize); -// -// Assert.assertEquals(1, normalize.size()); -// -// for(TupleExpr te: normalize) { -// Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te)); -// } -// -// FilterCollector fc1 = new FilterCollector(); -// pq1.getTupleExpr().visit(fc1); -// List fList1 = fc1.getFilters(); -// -// for(TupleExpr te: normalize) { -// FilterCollector fc2 = new FilterCollector(); -// te.visit(fc2); -// List fList2 = fc2.getFilters(); -// -// for(QueryModelNode q: fList2) { -// Assert.assertTrue(fList1.contains(q)); -// } -// } -// -// -// -// } -// -// - - - - - - - - - - - - private static class FilterCollector extends QueryModelVisitorBase { - - private List filterList = Lists.newArrayList(); - - public List getFilters() { - return filterList; - } - - @Override - public void meet(Filter node) { - filterList.add(node.getCondition()); - super.meet(node); - } - - } - - - - - - -} diff --git a/extras/indexingExample/pom.xml b/extras/indexingExample/pom.xml deleted file mode 100644 index 2ca3417a1..000000000 --- a/extras/indexingExample/pom.xml +++ /dev/null @@ -1,99 +0,0 @@ - - - - - 4.0.0 - - org.apache.rya - rya.extras - 3.2.10-SNAPSHOT - - - rya.indexing.example - Apache Rya Secondary Indexing Example - - - - org.apache.rya - rya.prospector - - - - org.apache.rya - mongodb.rya - - - org.apache.rya - rya.indexing - - - org.apache.rya - rya.indexing - accumulo-server - - - - org.apache.rya - rya.indexing - map-reduce - - - - org.apache.accumulo - accumulo-core - - - - org.locationtech.geomesa - geomesa-distributed-runtime - - - - - - - org.apache.rat - apache-rat-plugin - - - - **/src/main/vagrant/.vagrant/** - - - - - maven-assembly-plugin - - - src/main/assembly/assembly.xml - - - - - package - - single - - - - - - - diff --git a/extras/indexingExample/src/main/assembly/assembly.xml b/extras/indexingExample/src/main/assembly/assembly.xml deleted file mode 100644 index 0e8fd6dd3..000000000 --- a/extras/indexingExample/src/main/assembly/assembly.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - distribution - - zip - - false - - - - accumulo/lib/ext - - org.apache.rya:rya.indexing:*:accumulo-server - org.locationtech.geomesa:geomesa-distributed-runtime:* - - - - map-reduce - - org.apache.rya:rya.indexing:*:map-reduce - - - - dist/lib - - * - - - - org.apache.rya:rya.indexing.example - - - org.apache.rya:rya.indexing:*:accumulo-server - org.apache.rya:rya.indexing:*:map-reduce - - test - - - - - src/main/scripts/RunRyaDirectExample.bat - dist - - - src/main/java/RyaDirectExample.java - dist - - - diff --git a/extras/indexingExample/src/main/java/EntityDirectExample.java b/extras/indexingExample/src/main/java/EntityDirectExample.java deleted file mode 100644 index ae8352093..000000000 --- a/extras/indexingExample/src/main/java/EntityDirectExample.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.List; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.RyaSailFactory; -import mvm.rya.indexing.accumulo.ConfigUtils; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configuration; -import org.apache.log4j.Logger; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.Update; -import org.openrdf.query.UpdateExecutionException; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; - -public class EntityDirectExample { - private static final Logger log = Logger.getLogger(EntityDirectExample.class); - - // - // Connection configuration parameters - // - - private static final boolean USE_MOCK_INSTANCE = true; - private static final boolean PRINT_QUERIES = true; - private static final String INSTANCE = "instance"; - private static final String RYA_TABLE_PREFIX = "x_test_triplestore_"; - private static final String AUTHS = "U"; - - public static void main(String[] args) throws Exception { - Configuration conf = getConf(); - conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES); - - log.info("Creating the tables as root."); - SailRepository repository = null; - SailRepositoryConnection conn = null; - - try { - log.info("Connecting to Indexing Sail Repository."); - - Sail extSail = RyaSailFactory.getInstance(conf); - repository = new SailRepository(extSail); - repository.initialize(); - conn = repository.getConnection(); - - log.info("Running SPARQL Example: Add and Delete"); - testAddAndDelete(conn); - log.info("Running SAIL/SPARQL Example: Add and Temporal Search"); - testAddAndTemporalSearchWithPCJ(conn); - - } finally { - log.info("Shutting down"); - closeQuietly(conn); - closeQuietly(repository); - } - } - - private static void closeQuietly(SailRepository repository) { - if (repository != null) { - try { - repository.shutDown(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - private static void closeQuietly(SailRepositoryConnection conn) { - if (conn != null) { - try { - conn.close(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - - - - - public static void testAddAndDelete(SailRepositoryConnection conn) throws 
MalformedQueryException, - RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, - AccumuloException, AccumuloSecurityException, TableNotFoundException { - - // Add data - String query = "INSERT DATA\n"// - + "{ GRAPH {\n"// - + " " // - + " \"A new book\" ;\n"// - + " \"Avocados\" .\n" + "} }"; - - log.info("Performing Query"); - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?x {GRAPH {?x \"A new book\" . "// - + " ?x \"Avocados\" }}"; - CountingResultHandler resultHandler = new CountingResultHandler(); - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 1); - resultHandler.resetCount(); - - //TODO delete currently not implemented in AccumuloRyaDAO for -// // Delete Data -// query = "DELETE DATA\n" // -// + "{ GRAPH {\n" -// + " \"A new book\" ;\n" -// + " \"Avocados\" .\n" + "}}"; -// -// update = conn.prepareUpdate(QueryLanguage.SPARQL, query); -// update.execute(); -// -// query = "select ?x {GRAPH {?x \"A new book\" . "// -// + " ?x \"Avocados\" }}"; -// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(resultHandler); -// log.info("Result count : " + resultHandler.getCount()); -// -// Validate.isTrue(resultHandler.getCount() == 0); - } - - - - - - private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception { - - // create some resources and literals to make statements out of - - String sparqlInsert = "PREFIX pref: \n" - + "INSERT DATA {\n" // - + " a pref:Person ;\n" // - + " pref:hasProperty1 'property1' ;\n" // one second - + " pref:hasProperty2 'property2' ;\n" // 2 seconds - + " pref:hasProperty3 'property3' .\n" // 3 seconds - + " a pref:Person ; \n" // - + " pref:hasProperty4 'property4' ; \n" // - + " pref:hasProperty5 'property5' ; \n" // - + "}"; - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert); - update.execute(); - - String queryString = "PREFIX pref: \n" // - + "SELECT ?x ?z \n" // - + "WHERE { \n" - + " ?x a ?z. \n" - + " ?x pref:hasProperty1 'property1' . \n"// - + " ?x pref:hasProperty2 'property2' . \n"// - + " ?x pref:hasProperty3 'property3' . \n"// - + "}";// - - - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - CountingResultHandler tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - Validate.isTrue(tupleHandler.getBsSize() == 2); - - queryString = "PREFIX pref: \n" // - + "SELECT ?x ?w ?z \n" // - + "WHERE { \n" - + " ?x a ?z. \n" - + " ?x pref:hasProperty4 'property4' . \n"// - + " ?x pref:hasProperty5 ?w . \n"// - + "}";// - - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - Validate.isTrue(tupleHandler.getBsSize() == 3); - - - queryString = "PREFIX pref: " - + "SELECT ?v ?w ?x ?y ?z " - + "WHERE { " - + " ?w a ?z . " - + " ?w pref:hasProperty1 ?v . " - + " ?w pref:hasProperty2 'property2' . " - + " ?w pref:hasProperty3 'property3' . " - + " ?x a ?z . " - + " ?x pref:hasProperty4 'property4' . 
" - + " ?x pref:hasProperty5 ?y . " - + "}"; - - - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - Validate.isTrue(tupleHandler.getBsSize() == 5); - - } - - - private static Configuration getConf() { - - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE); - conf.set(ConfigUtils.USE_ENTITY, "true"); - conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX); - conf.set(ConfigUtils.ENTITY_TABLENAME, RYA_TABLE_PREFIX + "entity"); - conf.set(ConfigUtils.CLOUDBASE_USER, "root"); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, ""); - conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE); - conf.setInt(ConfigUtils.NUM_PARTITIONS, 3); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS); - - return conf; - } - - - private static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - private int bindingSize = 0; - private boolean bsSizeSet = false; - - public int getCount() { - return count; - } - - public int getBsSize() { - return bindingSize; - } - - public void resetBsSize() { - bindingSize = 0; - bsSizeSet = false; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - count++; - if(!bsSizeSet) { - bindingSize = arg0.size(); - bsSizeSet = true; - } - System.out.println(arg0); - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - } -} diff --git a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java deleted file mode 100644 index 860df06d7..000000000 --- a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import java.util.List; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.RyaSailFactory; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.mongodb.MongoDBRdfConfiguration; - -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configuration; -import org.apache.log4j.Logger; -import org.openrdf.model.Namespace; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.Update; -import org.openrdf.query.UpdateExecutionException; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.RepositoryResult; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; - -public class MongoRyaDirectExample { - private static final Logger log = Logger.getLogger(MongoRyaDirectExample.class); - - // - // Connection configuration parameters - // - - private static final boolean PRINT_QUERIES = true; - private static final String MONGO_DB = "rya"; - private static final String MONGO_COLL_PREFIX = "rya_"; - - public static void main(String[] args) throws Exception { - Configuration conf = getConf(); - conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES); - - SailRepository repository = null; - SailRepositoryConnection conn = null; - try { - log.info("Connecting to Indexing Sail Repository."); - Sail sail = RyaSailFactory.getInstance(conf); - repository = new SailRepository(sail); - repository.initialize(); - conn = repository.getConnection(); - - long start = System.currentTimeMillis(); - log.info("Running SPARQL Example: Add and Delete"); - testAddAndDelete(conn); - testAddAndDeleteNoContext(conn); - testAddNamespaces(conn); - testAddPointAndWithinSearch(conn); - - log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.); - } finally { - log.info("Shutting down"); - closeQuietly(conn); - closeQuietly(repository); - } - } - - private static void testAddPointAndWithinSearch(SailRepositoryConnection conn) throws Exception { - - String update = "PREFIX geo: "// - + "INSERT DATA { " // - + " a geo:Feature ; " // - + " geo:hasGeometry [ " // - + " a geo:Point ; " // - + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "// - + " ] . " // - + "}"; - - Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update); - u.execute(); - - String queryString; - TupleQuery tupleQuery; - CountingResultHandler tupleHandler; - - // ring containing point - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . 
"// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from during previous runs - - // ring outside point - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 0); - } - - private static void closeQuietly(SailRepository repository) { - if (repository != null) { - try { - repository.shutDown(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - private static void closeQuietly(SailRepositoryConnection conn) { - if (conn != null) { - try { - conn.close(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - private static Configuration getConf() { - - Configuration conf = new Configuration(); - conf.set(ConfigUtils.USE_MONGO, "true"); - conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true"); - conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, MONGO_DB); - conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, MONGO_COLL_PREFIX); - conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT"); - conf.set(ConfigUtils.USE_GEO, "true"); - conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, MONGO_COLL_PREFIX); - - return conf; - } - - - - public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException, - UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException { - - // Add data - String query = "INSERT DATA\n"// - + "{ GRAPH {\n"// - + " " // - + " \"A new book\" ;\n"// - + " \"Avocados\" .\n" + "} }"; - - log.info("Performing Query"); - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { GRAPH { ?p ?o . }}"; - CountingResultHandler resultHandler = new CountingResultHandler(); - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 2); - - resultHandler.resetCount(); - - // Delete Data - query = "DELETE DATA\n" // - + "{ GRAPH {\n" - + " \"A new book\" ;\n" - + " \"Avocados\" .\n" + "}}"; - - update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { GRAPH { ?p ?o . 
}}"; - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 0); - } - - public static void testAddNamespaces(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException, - UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException { - - conn.setNamespace("rya", "http://rya.com"); - RepositoryResult results = conn.getNamespaces(); - for (Namespace space : results.asList()){ - System.out.println(space.getName() + ", " + space.getPrefix()); - } - } - - public static void testAddAndDeleteNoContext(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException, - UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException { - - // Add data - String query = "INSERT DATA\n"// - + "{ \n"// - + " " // - + " \"A new book\" ;\n"// - + " \"Avocados\" .\n" + " }"; - - log.info("Performing Query"); - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { ?p ?o . }"; - CountingResultHandler resultHandler = new CountingResultHandler(); - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 2); - - resultHandler.resetCount(); - - // Delete Data - query = "DELETE DATA\n" // - + "{ \n" - + " \"A new book\" ;\n" - + " \"Avocados\" .\n" + "}"; - - update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { { ?p ?o . }}"; - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 0); - } - - private static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - - public int getCount() { - return count; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - count++; - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - } -} diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java deleted file mode 100644 index b3e8dae57..000000000 --- a/extras/indexingExample/src/main/java/RyaDirectExample.java +++ /dev/null @@ -1,700 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.util.List; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.indexing.RyaSailFactory; -import mvm.rya.indexing.accumulo.ConfigUtils; -import mvm.rya.indexing.accumulo.geo.GeoConstants; -import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.MutationsRejectedException; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.commons.lang.Validate; -import org.apache.hadoop.conf.Configuration; -import org.apache.log4j.Logger; -import org.openrdf.model.URI; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.LiteralImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.Update; -import org.openrdf.query.UpdateExecutionException; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; -import org.openrdf.sail.SailException; - -public class RyaDirectExample { - private static final Logger log = Logger.getLogger(RyaDirectExample.class); - - // - // Connection configuration parameters - // - - private static final boolean USE_MOCK_INSTANCE = true; - private static final boolean PRINT_QUERIES = true; - private static final String INSTANCE = "instance"; - private static final String RYA_TABLE_PREFIX = "x_test_triplestore_"; - private static final String AUTHS = ""; - - - - public static void main(String[] args) throws Exception { - Configuration conf = getConf(); - conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES); - - log.info("Creating the tables as root."); -// createTables(addRootConf(conf), conf); - - SailRepository repository = null; - SailRepositoryConnection conn = null; - - try { - log.info("Connecting to Indexing Sail Repository."); - - Sail extSail = RyaSailFactory.getInstance(conf); - repository = new SailRepository(extSail); - repository.initialize(); - conn = repository.getConnection(); - - createPCJ(conn); - - long start = System.currentTimeMillis(); - log.info("Running SPARQL Example: Add and Delete"); - testAddAndDelete(conn); - log.info("Running SAIL/SPARQL Example: PCJ Search"); - 
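-         // Note: the two searches in testPCJSearch() mirror queryString1 and queryString2
-         // in createPCJ() below. createPCJ(conn), invoked above, materializes those joins
-         // into the Accumulo tables x_test_triplestore_INDEX_1 and x_test_triplestore_INDEX_2
-         // via AccumuloIndexSet, so with ConfigUtils.USE_PCJ enabled these SELECTs should be
-         // answerable from the pre-computed results rather than by re-evaluating the joins.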
testPCJSearch(conn); - log.info("Running SAIL/SPARQL Example: Add and Temporal Search"); - testAddAndTemporalSearchWithPCJ(conn); - log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ"); - testAddAndFreeTextSearchWithPCJ(conn); - log.info("Running SPARQL Example: Add Point and Geo Search with PCJ"); - testAddPointAndWithinSearchWithPCJ(conn); - log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search"); - testTemporalFreeGeoSearch(conn); - log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search"); - testGeoFreetextWithPCJSearch(conn); - - log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.); - } finally { - log.info("Shutting down"); - closeQuietly(conn); - closeQuietly(repository); - } - } - - private static void closeQuietly(SailRepository repository) { - if (repository != null) { - try { - repository.shutDown(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - private static void closeQuietly(SailRepositoryConnection conn) { - if (conn != null) { - try { - conn.close(); - } catch (RepositoryException e) { - // quietly absorb this exception - } - } - } - - private static Configuration getConf() { - - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE); - conf.set(ConfigUtils.USE_PCJ, "true"); - conf.set(ConfigUtils.USE_GEO, "true"); - conf.set(ConfigUtils.USE_FREETEXT, "true"); - conf.set(ConfigUtils.USE_TEMPORAL, "true"); - conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX); - conf.set(ConfigUtils.CLOUDBASE_USER, "root"); - conf.set(ConfigUtils.CLOUDBASE_PASSWORD, ""); - conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE); - conf.setInt(ConfigUtils.NUM_PARTITIONS, 3); - conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS); - - // only geo index statements with geo:asWKT predicates - conf.set(ConfigUtils.GEO_PREDICATES_LIST, GeoConstants.GEO_AS_WKT.stringValue()); - return conf; - } - - public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException, - RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, - AccumuloException, AccumuloSecurityException, TableNotFoundException { - - // Add data - String query = "INSERT DATA\n"// - + "{ GRAPH {\n"// - + " " // - + " \"A new book\" ;\n"// - + " \"Avocados\" .\n" + "} }"; - - log.info("Performing Query"); - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { GRAPH { ?p ?o . }}"; - CountingResultHandler resultHandler = new CountingResultHandler(); - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 2); - resultHandler.resetCount(); - - // Delete Data - query = "DELETE DATA\n" // - + "{ GRAPH {\n" - + " \"A new book\" ;\n" - + " \"Avocados\" .\n" + "}}"; - - update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - update.execute(); - - query = "select ?p ?o { GRAPH { ?p ?o . 
}}"; - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(resultHandler); - log.info("Result count : " + resultHandler.getCount()); - - Validate.isTrue(resultHandler.getCount() == 0); - } - - - private static void testPCJSearch(SailRepositoryConnection conn) throws Exception { - - String queryString; - TupleQuery tupleQuery; - CountingResultHandler tupleHandler; - - // ///////////// search for bob - queryString = "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - - // ///////////// search for bob - queryString = "PREFIX fts: "// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?c a ?e . "// - + " ?e ?l . "// - + " ?e ?o . "// - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 2); - - } - - - - - private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception { - - // create some resources and literals to make statements out of - - String sparqlInsert = "PREFIX time: \n" - + "INSERT DATA {\n" // - + "_:eventz a time:Instant ;\n" - + " time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" // one second - + " time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^ ;\n" // 2 seconds - + " time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" // 3 seconds - + " time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" // 4 seconds - + " time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n" - + " time:inXSDDateTime '2006-01-01' ;\n" - + " time:inXSDDateTime '2007-01-01' ;\n" - + " time:inXSDDateTime '2008-01-01' ; .\n" - + "}"; - - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert); - update.execute(); - - // Find all stored dates. - String queryString = "PREFIX time: \n"// - + "PREFIX tempo: \n"// - + "SELECT ?event ?time \n" // - + "WHERE { \n" - + " ?event time:inXSDDateTime ?time . \n"// - + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds - + "}";// - - - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - CountingResultHandler tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 5); - - // Find all stored dates. - queryString = "PREFIX time: \n"// - + "PREFIX tempo: \n"// - + "SELECT ?event ?time \n" // - + "WHERE { \n" - + " ?event time:inXSDDateTime ?time . \n"// - + " ?event a time:Instant . \n"// - + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds - + "}";// - - - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 5); - - - // Find all stored dates. - queryString = "PREFIX time: \n"// - + "PREFIX tempo: \n"// - + "SELECT ?event ?time ?e ?c ?l ?o \n" // - + "WHERE { \n" - + " ?e a ?c . \n"// - + " ?e ?l . \n"// - + " ?e ?o . 
\n"// - + " ?event a time:Instant . \n"// - + " ?event time:inXSDDateTime ?time . \n"// - + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 5); - } - - - - - - - private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception { - // add data to the repository using the SailRepository add methods - ValueFactory f = conn.getValueFactory(); - URI person = f.createURI("http://example.org/ontology/Person"); - - String uuid; - - uuid = "urn:people:alice"; - conn.add(f.createURI(uuid), RDF.TYPE, person); - conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string"))); - - uuid = "urn:people:bobss"; - conn.add(f.createURI(uuid), RDF.TYPE, person); - conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en")); - - String queryString; - TupleQuery tupleQuery; - CountingResultHandler tupleHandler; - - // ///////////// search for alice - queryString = "PREFIX fts: "// - + "SELECT ?person ?match ?e ?c ?l ?o " // - + "{" // - + " ?person ?match . "// - + " FILTER(fts:text(?match, \"pal*\")) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - - - // ///////////// search for alice and bob - queryString = "PREFIX fts: "// - + "SELECT ?person ?match " // - + "{" // - + " ?person ?match . "// - + " ?person a . "// - + " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 2); - - // ///////////// search for alice and bob - queryString = "PREFIX fts: "// - + "SELECT ?person ?match " // - + "{" // - + " ?person a . "// - + " ?person ?match . "// - + " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " // - + " FILTER(fts:text(?match, \"pal*\")) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - - - // ///////////// search for bob - queryString = "PREFIX fts: "// - + "SELECT ?person ?match ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?person a . "// - + " ?person ?match . 
"// - + " FILTER(fts:text(?match, \"!alice & hose\")) " // - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - } - - - - private static void testAddPointAndWithinSearchWithPCJ(SailRepositoryConnection conn) throws Exception { - - String update = "PREFIX geo: "// - + "INSERT DATA { " // - + " a geo:Feature ; " // - + " geo:hasGeometry [ " // - + " a geo:Point ; " // - + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "// - + " ] . " // - + "}"; - - Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update); - u.execute(); - - String queryString; - TupleQuery tupleQuery; - CountingResultHandler tupleHandler; - - // point outside search ring - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt " // - + "{" // - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 0); - - // point inside search ring - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt ?e ?l ?o" // - + "{" // - + " ?feature a ?e . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " // - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - - - // point inside search ring with Pre-Computed Join - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt ?e ?l ?o" // - + "{" // - + " ?feature a ?e . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " // - + "}";// - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from during previous runs - - // point outside search ring with PCJ - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt ?e ?l ?o " // - + "{" // - + " ?feature a ?e . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . 
"// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 0); - - // point inside search ring with different Pre-Computed Join - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " // - + "}";// - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - } - - - private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, - RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException { - - - String queryString; - TupleQuery tupleQuery; - CountingResultHandler tupleHandler; - - // ring containing point - queryString = "PREFIX geo: "// - + "PREFIX geof: "// - + "PREFIX time: "// - + "PREFIX tempo: "// - + "PREFIX fts: "// - + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" // - + "{" // - + " ?event a time:Instant . \n"// - + " ?event time:inXSDDateTime ?time . \n"// - + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . "// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " // - + " ?person a . "// - + " ?person ?match . "// - + " FILTER(fts:text(?match, \"pal*\")) " // - + "}";// - - - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - - tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 5); - - } - - - - private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException, - RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException { - // ring outside point - String queryString = "PREFIX geo: "// - + "PREFIX fts: "// - + "PREFIX geof: "// - + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " // - + "{" // - + " ?person a . "// - + " ?person ?match . "// - + " FILTER(fts:text(?match, \"!alice & hose\")) " // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + " ?feature a geo:Feature . "// - + " ?feature geo:hasGeometry ?point . "// - + " ?point a geo:Point . "// - + " ?point geo:asWKT ?wkt . 
"// - + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " // - + "}";// - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); - CountingResultHandler tupleHandler = new CountingResultHandler(); - tupleQuery.evaluate(tupleHandler); - log.info("Result count : " + tupleHandler.getCount()); - Validate.isTrue(tupleHandler.getCount() == 1); - } - - - - private static void createPCJ(SailRepositoryConnection conn) - throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException { - - String queryString1 = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?c a ?e . "// - + " ?e ?l . "// - + " ?e ?o . "// - + "}";// - - String queryString2 = ""// - + "SELECT ?e ?c ?l ?o " // - + "{" // - + " ?e a ?c . "// - + " ?e ?l . "// - + " ?e ?o . "// - + "}";// - - - URI obj,subclass,talksTo; - URI person = new URIImpl("urn:people:alice"); - URI feature = new URIImpl("urn:feature"); - URI sub = new URIImpl("uri:entity"); - subclass = new URIImpl("uri:class"); - obj = new URIImpl("uri:obj"); - talksTo = new URIImpl("uri:talksTo"); - - conn.add(person, RDF.TYPE, sub); - conn.add(feature, RDF.TYPE, sub); - conn.add(sub, RDF.TYPE, subclass); - conn.add(sub, RDFS.LABEL, new LiteralImpl("label")); - conn.add(sub, talksTo, obj); - - AccumuloIndexSet ais1 = null; - AccumuloIndexSet ais2 = null; - String tablename1 = RYA_TABLE_PREFIX + "INDEX_1"; - String tablename2 = RYA_TABLE_PREFIX + "INDEX_2"; - - Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes())); - accCon.tableOperations().create(tablename1); - accCon.tableOperations().create(tablename2); - - try { - ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1); - ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2); - } catch (MalformedQueryException e) { - e.printStackTrace(); - } catch (SailException e) { - e.printStackTrace(); - } catch (QueryEvaluationException e) { - e.printStackTrace(); - } catch (MutationsRejectedException e) { - e.printStackTrace(); - } catch (TableNotFoundException e) { - e.printStackTrace(); - } - - } - - - private static class CountingResultHandler implements TupleQueryResultHandler { - private int count = 0; - - public int getCount() { - return count; - } - - public void resetCount() { - this.count = 0; - } - - @Override - public void startQueryResult(List arg0) throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException { - count++; - System.out.println(arg0); - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - // TODO Auto-generated method stub - - } - } -} diff --git a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat deleted file mode 100644 index a89e3d1c3..000000000 --- a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat +++ /dev/null @@ -1,41 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one -rem or more contributor license agreements. See the NOTICE file -rem distributed with this work for additional information -rem regarding copyright ownership. 
The ASF licenses this file -rem to you under the Apache License, Version 2.0 (the -rem "License"); you may not use this file except in compliance -rem with the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, -rem software distributed under the License is distributed on an -rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -rem KIND, either express or implied. See the License for the -rem specific language governing permissions and limitations -rem under the License. -SET CP= - -REM Check to see if javac is on the path -where /Q javac -IF %ERRORLEVEL% NEQ 0 goto :NO_JAVAC - - -for /f %%f in ('DIR /b .\lib\*.jar') do call :append .\lib\%%f - -javac -cp "%CP%" RyaDirectExample.java -java -cp "%CP%" RyaDirectExample - -goto :end - -:append -@echo off -SET CP=%CP%%1; -goto :end - -:NO_JAVAC -echo ERROR: Could not find javac -goto :end - -:end diff --git a/extras/pom.xml b/extras/pom.xml deleted file mode 100644 index f3a88d300..000000000 --- a/extras/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya-project - 3.2.10-SNAPSHOT - - - rya.extras - Apache Rya Extra Projects - - pom - - - rya.prospector - rya.manual - tinkerpop.rya - rya.console - indexing - indexingExample - - diff --git a/extras/rya.console/.gitignore b/extras/rya.console/.gitignore deleted file mode 100644 index 5d1172ace..000000000 --- a/extras/rya.console/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -/.classpath -/.project -.settings/ -target/ -/log.roo -*.log - -/bin/ diff --git a/extras/rya.console/pom.xml b/extras/rya.console/pom.xml deleted file mode 100644 index 1bbb5a046..000000000 --- a/extras/rya.console/pom.xml +++ /dev/null @@ -1,100 +0,0 @@ - - - - - 4.0.0 - - org.apache.rya - rya.extras - 3.2.10-SNAPSHOT - - - rya.console - Apache Rya Console - - - org.springframework.shell.Bootstrap - - - - - org.apache.rya - rya.api - - - org.apache.rya - accumulo.rya - - - jline - jline - - - - - - org.springframework.shell - spring-shell - - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy-dependencies - prepare-package - - copy-dependencies - - - ${project.build.directory}/lib - true - true - true - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - true - - lib/ - ${jar.mainclass} - - - ${project.version} - - - - - - - - diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java deleted file mode 100644 index 2d0fac832..000000000 --- a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java +++ /dev/null @@ -1,69 +0,0 @@ -package mvm.rya.console; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.plugin.support.DefaultBannerProvider; -import org.springframework.shell.support.util.OsUtils; -import org.springframework.stereotype.Component; - -/** - * @author Jarred Li - */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class RyaBannerProvider extends DefaultBannerProvider - implements CommandMarker { - - @CliCommand(value = {"version"}, help = "Displays current CLI version") - @Override - public String getBanner() { - StringBuffer buf = new StringBuffer(); - buf.append("" + - "________ _________ ______ \n" + - "___ __ \\____ _______ _ __ ____/____________________________ /____ \n" + - "__ /_/ /_ / / / __ `/ _ / _ __ \\_ __ \\_ ___/ __ \\_ /_ _ \\\n" + - "_ _, _/_ /_/ // /_/ / / /___ / /_/ / / / /(__ )/ /_/ / / / __/\n" + - "/_/ |_| _\\__, / \\__,_/ \\____/ \\____//_/ /_//____/ \\____//_/ \\___/ \n" + - " /____/ " + OsUtils.LINE_SEPARATOR); - buf.append("Version:" + this.getVersion()); - return buf.toString(); - - } - - @Override - public String getVersion() { - return "3.0.0"; - } - - @Override - public String getWelcomeMessage() { - return "Welcome to the Rya Console"; - } - - @Override - public String getProviderName() { - return "rya"; - } -} diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java deleted file mode 100644 index 3f63b2053..000000000 --- a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java +++ /dev/null @@ -1,230 +0,0 @@ -package mvm.rya.console; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import info.aduna.iteration.CloseableIteration; - -import java.io.FileInputStream; -import java.io.StringReader; -import java.util.Formatter; -import java.util.Locale; -import java.util.logging.Level; -import java.util.logging.Logger; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQueryEngine; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.openrdf.model.Statement; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.ntriples.NTriplesParserFactory; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliAvailabilityIndicator; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; - -@Component -public class RyaConsoleCommands implements CommandMarker { - - private static final NTriplesParserFactory N_TRIPLES_PARSER_FACTORY = new NTriplesParserFactory(); - - protected final Logger LOG = Logger.getLogger(getClass().getName()); - - private RyaContext ryaContext = RyaContext.getInstance(); - private RyaDAO ryaDAO; - private RDFParser ntrips_parser = null; - - public RyaConsoleCommands() { - ntrips_parser = N_TRIPLES_PARSER_FACTORY.getParser(); - ntrips_parser.setRDFHandler(new RDFHandler() { - - public void startRDF() throws RDFHandlerException { - - } - - public void endRDF() throws RDFHandlerException { - - } - - public void handleNamespace(String s, String s1) throws RDFHandlerException { - - } - - public void handleStatement(Statement statement) throws RDFHandlerException { - try { - RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement); - ryaDAO.add(ryaStatement); - } catch (Exception e) { - throw new RDFHandlerException(e); - } - } - - public void handleComment(String s) throws RDFHandlerException { - - } - }); - } - - /** - * commands: - * 1. connect(instance, user, password, zk) - * 1.a. disconnect - * 2. query - * 3. 
add - */ - - @CliAvailabilityIndicator({"connect"}) - public boolean isConnectAvailable() { - return true; - } - - @CliAvailabilityIndicator({"qt", "add", "load", "disconnect"}) - public boolean isCommandAvailable() { - return ryaDAO != null; - } - - @CliCommand(value = "qt", help = "Query with Triple Pattern") - public String queryTriple( - @CliOption(key = {"subject"}, mandatory = false, help = "Subject") final String subject, - @CliOption(key = {"predicate"}, mandatory = false, help = "Predicate") final String predicate, - @CliOption(key = {"object"}, mandatory = false, help = "Object") final String object, - @CliOption(key = {"context"}, mandatory = false, help = "Context") final String context, - @CliOption(key = {"maxResults"}, mandatory = false, help = "Maximum Number of Results", unspecifiedDefaultValue = "100") final String maxResults - ) { - try { - RdfCloudTripleStoreConfiguration conf = ryaDAO.getConf().clone(); - if (maxResults != null) { - conf.setLimit(Long.parseLong(maxResults)); - } - RyaQueryEngine queryEngine = ryaDAO.getQueryEngine(); - CloseableIteration query = - queryEngine.query(new RyaStatement( - (subject != null) ? (new RyaURI(subject)) : null, - (predicate != null) ? (new RyaURI(predicate)) : null, - (object != null) ? (new RyaURI(object)) : null, - (context != null) ? (new RyaURI(context)) : null), conf); - StringBuilder sb = new StringBuilder(); - Formatter formatter = new Formatter(sb, Locale.US); - String format = "%-40s %-40s %-40s %-40s\n"; - formatter.format(format, "Subject", "Predicate", - "Object", "Context"); - while (query.hasNext()) { - RyaStatement next = query.next(); - formatter.format(format, next.getSubject().getData(), next.getPredicate().getData(), - next.getObject().getData(), (next.getContext() != null) ? (next.getContext().getData()) : (null)); - sb.append("\n"); - } - return sb.toString(); - } catch (Exception e) { - LOG.log(Level.SEVERE, "", e); - } - return ""; - } - - @CliCommand(value = "load", help = "Load file") - public void load( - @CliOption(key = {"", "file"}, mandatory = true, help = "File of ntriples rdf to load") final String file - ) { - //diff formats? 
- //diff types of urls - try { - ntrips_parser.parse(new FileInputStream(file), ""); - } catch (Exception e) { - LOG.log(Level.SEVERE, "", e); - } - } - - @CliCommand(value = "add", help = "Add Statement") - public void add( - @CliOption(key = {"", "statement"}, mandatory = true, help = "Statement in NTriples format") final String statement) { - try { - ntrips_parser.parse(new StringReader(statement), ""); - } catch (Exception e) { - LOG.log(Level.SEVERE, "", e); - } - } - - @CliCommand(value = "connect", help = "Connect to Rya Triple Store") - public String connect( - @CliOption(key = {"instance"}, mandatory = true, help = "Accumulo Instance") final String instance, - @CliOption(key = {"user"}, mandatory = true, help = "Accumulo User") final String user, - @CliOption(key = {"pwd"}, mandatory = true, help = "Accumulo Pwd") final String pwd, - @CliOption(key = {"zk"}, mandatory = true, help = "Accumulo Zk (zk=mock for the mock instance)") final String zk, - @CliOption(key = {"pre"}, mandatory = false, help = "Accumulo table prefix", unspecifiedDefaultValue = "rya_") final String pre) { - try { - //using Cloudbase - Connector connector = null; - AccumuloRyaDAO cryaDao = new AccumuloRyaDAO(); - if ("mock".equals(zk)) { - //mock instance - connector = new MockInstance(instance).getConnector(user, pwd); - } else { - connector = new ZooKeeperInstance(instance, zk).getConnector(user, pwd); - } - - cryaDao.setConnector(connector); - AccumuloRdfConfiguration configuration = new AccumuloRdfConfiguration(); - configuration.setTablePrefix(pre); - cryaDao.setConf(configuration); - cryaDao.init(); - this.ryaDAO = cryaDao; - return "Connected to Accumulo"; - } catch (Exception e) { - LOG.log(Level.SEVERE, "", e); - } - return ""; - } - - @CliCommand(value = "disconnect", help = "Disconnect from Rya Store") - public String disconnect() { - if (ryaDAO == null) { - return "Command is not available because Rya is not connected. Please 'connect' first."; - } - try { - this.ryaDAO.destroy(); - this.ryaDAO = null; - } catch (RyaDAOException e) { - LOG.log(Level.SEVERE, "", e); - } - return ""; - } - - public RyaDAO getRyaDAO() { - return ryaDAO; - } - - public void setRyaDAO(RyaDAO ryaDAO) { - this.ryaDAO = ryaDAO; - } -} diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java deleted file mode 100644 index 97182aa11..000000000 --- a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -package mvm.rya.console; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider; -import org.springframework.stereotype.Component; - -/** - * - * @author Jarred Li - * - */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class RyaHistoryFileNameProvider extends DefaultHistoryFileNameProvider{ - - @Override - public String getHistoryFileName() { - return "ryaconsole.log"; - } - - @Override - public String getProviderName() { - return "Rya Console History Log"; - } - -} diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java deleted file mode 100644 index b199819b2..000000000 --- a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -package mvm.rya.console; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultPromptProvider; -import org.springframework.stereotype.Component; - -/** - * @author Jarred Li - * - */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class RyaPromptProvider extends DefaultPromptProvider { - - @Override - public String getPrompt() { - return "rya>"; - } - - - @Override - public String getProviderName() { - return "Rya Console Prompt"; - } - -} diff --git a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml b/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml deleted file mode 100644 index e593a4815..000000000 --- a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - diff --git a/extras/rya.manual/pom.xml b/extras/rya.manual/pom.xml deleted file mode 100644 index 75c106a81..000000000 --- a/extras/rya.manual/pom.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.extras - 3.2.10-SNAPSHOT - - - rya.manual - Apache Rya Manual - - - - - org.apache.maven.plugins - maven-site-plugin - - - org.apache.maven.doxia - doxia-module-markdown - 1.6 - - - - UTF-8 - UTF-8 - - - - - - diff --git a/extras/rya.manual/src/site/markdown/_index.md b/extras/rya.manual/src/site/markdown/_index.md deleted file mode 100644 index bf030a3a6..000000000 --- a/extras/rya.manual/src/site/markdown/_index.md +++ /dev/null @@ -1,44 +0,0 @@ - - - -# Rya -- [Overview](overview.md) -- [Quick Start](quickstart.md) -- [Load Data](loaddata.md) -- [Query Data](querydata.md) -- [Evaluation Table](eval.md) -- [Pre-computed Joins](loadPrecomputedJoin.md) -- [Inferencing](infer.md) - -# 
Samples
-- [Typical First Steps](sm-firststeps.md)
-- [Simple Add/Query/Remove Statements](sm-simpleaqr.md)
-- [Sparql query](sm-sparqlquery.md)
-- [Adding Authentication](sm-addauth.md)
-- [Inferencing](sm-infer.md)
-- [Named Graph](sm-namedgraph.md)
-- [Update data](sm-updatedata.md)
-- [Alx](alx.md)
-
-# Development
-- [Building From Source](build-source.md)
-- [LTS Maven Settings XML](maven-settings.md)
diff --git a/extras/rya.manual/src/site/markdown/alx.md b/extras/rya.manual/src/site/markdown/alx.md
deleted file mode 100644
index 2d0eae750..000000000
--- a/extras/rya.manual/src/site/markdown/alx.md
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-# Alx Rya Integration
-
-Alx is a modular framework for developing applications. Rya has mechanisms to integrate directly into Alx to provide other modules access to queries.
-
-Currently, the Alx Rya extension only allows interacting with an Accumulo store.
-
-## Prerequisites
-
-- Alx 1.0.5+ (we will refer to it as the ALX_HOME directory from now on)
-- alx.rya features xml (can be found in maven at `mvn:mvm.rya/alx.rya//xml/features`)
-
-## Steps
-
-1. Start up Alx
-2. features:addurl alx.rya features xml
-3. features:install alx.rya
-4. (optional) features:install alx.rya.console
-
-That's it. To verify, run `ls` and make sure something like this pops up:
-
-```
-mvm.rya.alx.rya (99) provides:
-------------------------------
-Bundle-SymbolicName = mvm.rya.alx.rya
-Bundle-Version = 3.0.4.SNAPSHOT
-objectClass = org.osgi.service.cm.ManagedService
-service.id = 226
-service.pid = mvm.rya.alx
-----
-...
-```
-
-## Using
-
-The bundle registers a Sail Repository, so you can interact with it directly as in the other code examples. Here is a quick groovy example of the usage:
-
-``` JAVA
-import org.springframework.osgi.extensions.annotation.*;
-import org.openrdf.repository.*;
-import org.openrdf.model.ValueFactory;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-
-class TstRepo {
-
-    @ServiceReference
-    public void setRepo(Repository repo) {
-        println repo
-        RepositoryConnection conn = repo.getConnection();
-        ValueFactory vf = VALUE_FACTORY;
-        def statements = conn.getStatements(vf.createURI("http://www.Department0.University0.edu"), null, null, true);
-        while(statements.hasNext()) {
-            System.out.println(statements.next());
-        }
-        statements.close();
-        conn.close();
-    }
-
-}
-```
-
-The bundle also registers a RyaDAO, so you can interact with the RyaDAO interface directly.
diff --git a/extras/rya.manual/src/site/markdown/build-source.md b/extras/rya.manual/src/site/markdown/build-source.md
deleted file mode 100644
index 07f0cb5d8..000000000
--- a/extras/rya.manual/src/site/markdown/build-source.md
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-# Building from Source
-
-## Prerequisites
-
-* Rya code
-* Maven 2.2+
-
-## Building
-
-Using Git, pull down the latest code from the url above.
-
-Run the command to build the code `mvn clean install`
-
-If all goes well, here are the artifacts that you will be interested in:
-* Rya-WAR : web/web-rya/target/web.rya.war
diff --git a/extras/rya.manual/src/site/markdown/eval.md b/extras/rya.manual/src/site/markdown/eval.md
deleted file mode 100644
index fc4095bf8..000000000
--- a/extras/rya.manual/src/site/markdown/eval.md
+++ /dev/null
@@ -1,79 +0,0 @@
-
-
-# Prospects Table
-
-The Prospects Table provides statistics on the number of subject/predicate/object data found in the triple store. It is currently a
-Map Reduce job that will run against the Rya store and save all the statistics in the prospects table.
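
The sections below cover building and running the job; once it has run, the statistics it produces are consumed at query time. As a minimal sketch of that consumption side, the prospects table can be wired into the query planner through `ProspectorServiceEvalStatsDAO`, reusing the `connector`, `conf`, and `store` objects from the Query Data examples (all three are assumed to already be configured):

``` JAVA
// Assumes an Accumulo Connector and an AccumuloRdfConfiguration whose table
// prefix matches the prospects table written by this job (see Query Data).
ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(connector, conf);
evalDao.init();

// The store now consults the gathered statistics when reordering joins.
store.setRdfEvalStatsDAO(evalDao);
```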
- -## Build - -[Build the mmrts.git repo](build-source.md) - -## Run - -Deploy the `extras/rya.prospector/target/rya.prospector--shade.jar` file to the hadoop cluster. - -The prospector also requires a configuration file that defines where Accumulo is, which Rya table (has to be the SPO table) to read from, and -which table to output to. (Note: Make sure you follow the same schema as the Rya tables (prospects table name: tableprefix_prospects) - -A sample configuration file might look like the following: - -``` XML - - - - - prospector.intable - triplestore_spo - - - prospector.outtable - triplestore_prospects - - - prospector.auths - U,FOUO - - - instance - accumulo - - - zookeepers - localhost:2181 - - - username - root - - - password - secret - - -``` - -Run the command, filling in the correct information. - -``` -hadoop jar rya.prospector-3.0.4-SNAPSHOT-shade.jar mvm.rya.prospector.mr.Prospector /tmp/prospectorConf.xml -``` \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/index.md b/extras/rya.manual/src/site/markdown/index.md deleted file mode 100644 index 0748284d7..000000000 --- a/extras/rya.manual/src/site/markdown/index.md +++ /dev/null @@ -1,45 +0,0 @@ - - -# Rya - -This project contains documentation about the Rya, a scalable RDF triple store on top of Accumulo. - -- [Overview](overview.md) -- [Quick Start](quickstart.md) -- [Load Data](loaddata.md) -- [Query Data](querydata.md) -- [Evaluation Table](eval.md) -- [Pre-computed Joins](loadPrecomputedJoin.md) -- [Inferencing](infer.md) - -# Samples -- [Typical First Steps](sm-firststeps.md) -- [Simple Add/Query/Remove Statements](sm-simpleaqr.md) -- [Sparql query](sm-sparqlquery.md) -- [Adding Authentication](sm-addauth.md) -- [Inferencing](sm-infer.md) -- [Named Graph](sm-namedgraph.md) -- [Update data](sm-updatedata.md) -- [Alx](alx.md) - -# Development -- [Building From Source](build-source.md) diff --git a/extras/rya.manual/src/site/markdown/infer.md b/extras/rya.manual/src/site/markdown/infer.md deleted file mode 100644 index 35b6f1426..000000000 --- a/extras/rya.manual/src/site/markdown/infer.md +++ /dev/null @@ -1,35 +0,0 @@ - - -# Inferencing - -The current inferencing set supported includes: - -* rdfs:subClassOf -* rdfs:subPropertyOf -* owl:equivalentProperty -* owl:inverseOf -* owl:SymmetricProperty -* owl:TransitiveProperty (* This is implemented, but probably not fully. Still in testing) - -Nothing special has to be done outside of making sure that the RdfCloudTripleStore object has the InferencingEngine object set on it and properly configured. This is usually done by default. See the [Query Data Section](querydata.md) for a simple example. - -Also, the inferencing engine is set to pull down the latest model every 5 minutes currently (which is configurable). So if you load a new model, a previous RepositoryConnection may not pick up these changes into the Inferencing Engine yet. Getting the InferencingEngine object from the RdfCloudTripleStore and running the `refreshGraph` method can refresh the inferred graph immediately. \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md b/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md deleted file mode 100644 index 220cf030e..000000000 --- a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md +++ /dev/null @@ -1,49 +0,0 @@ - - -# Load Pre-computed Join - -A tool has been created to load a pre-computed join. 
This tool will generate an index to support a pre-computed join on a user provided SPARQL query, and then register that query within Rya. - - -## Registering a pre-computed join - -Generating a pre-computed join is done using Pig to execute a series of Map Reduce jobs. The index (pre-computed join) is associated with a user defined SPARQL query. - -To execute the indexing tool, compile and run `mvm.rya.accumulo.pig.IndexWritingTool` -with the following seven input arguments: `[hdfsSaveLocation] [sparqlFile] [instance] [cbzk] [user] [password] [rdfTablePrefix]` - - -Options: - -* hdfsSaveLocation: a working directory on hdfs for storing interim results -* sparqlFile: the query to generate a precomputed join for -* instance: the accumulo instance name -* cbzk: the accumulo zookeeper name -* user: the accumulo username -* password: the accumulo password for the supplied user -* rdfTablePrefix : The tables (spo, po, osp) are prefixed with this qualifier. The tables become: (rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp - - -# Using a Pre-computed Join - -An example of using a pre-computed join can be referenced in -`mvm.rya.indexing.external.ExternalSailExample` diff --git a/extras/rya.manual/src/site/markdown/loaddata.md b/extras/rya.manual/src/site/markdown/loaddata.md deleted file mode 100644 index 2c6bc000a..000000000 --- a/extras/rya.manual/src/site/markdown/loaddata.md +++ /dev/null @@ -1,142 +0,0 @@ - - -# Load Data - -There are a few mechanisms to load data - -## Web REST endpoint - -The War sets up a Web REST endpoint at `http://server/web.rya/loadrdf` that allows POST data to get loaded into the Rdf Store. This short tutorial will use Java code to post data. - -First, you will need data to load and will need to figure out what format that data is in. - -For this sample, we will use the following N-Triples: - -``` - . - "Thing" . - . -``` - -Save this file somewhere `$RDF_DATA` - -Second, use the following Java code to load data to the REST endpoint: - -``` JAVA -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.net.URL; -import java.net.URLConnection; - -public class LoadDataServletRun { - - public static void main(String[] args) { - try { - final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader() - .getResourceAsStream("$RDF_DATA"); - URL url = new URL("http://server/web.rya/loadrdf" + - "?format=N-Triples" + - ""); - URLConnection urlConnection = url.openConnection(); - urlConnection.setRequestProperty("Content-Type", "text/plain"); - urlConnection.setDoOutput(true); - - final OutputStream os = urlConnection.getOutputStream(); - - int read; - while((read = resourceAsStream.read()) >= 0) { - os.write(read); - } - resourceAsStream.close(); - os.flush(); - - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - os.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } -} -``` - -Compile and run this code above, changing the references for $RDF_DATA and the url that your Rdf War is running at. - -The default "format" is RDF/XML, but these formats are supported : RDFXML, NTRIPLES, TURTLE, N3, TRIX, TRIG. - -## Bulk Loading data - -Bulk loading data is done through Map Reduce jobs - -### Bulk Load RDF data - -This Map Reduce job will read a full file into memory and parse it into statements. 
The statements are saved into the store. Here is an example for storing in Accumulo:
-
-```
-hadoop jar target/accumulo.rya-3.0.4-SNAPSHOT-shaded.jar mvm.rya.accumulo.mr.fileinput.BulkNtripsInputTool -Dac.zk=localhost:2181 -Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret -Drdf.tablePrefix=triplestore_ -Dio.sort.mb=64 /tmp/temp.ntrips
-```
-
-Options:
-
-- rdf.tablePrefix : The tables (spo, po, osp) are prefixed with this qualifier. The tables become: (rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp
-- ac.* : Accumulo connection parameters
-- rdf.format : See RDFFormat from openrdf, samples include (Trig, N-Triples, RDF/XML)
-- io.sort.mb : The higher the value, the faster the job goes. Just remember that you will need at least this much ram per mapper
-
-The argument is the directory/file to load. This file needs to be loaded into HDFS before running.
-
-## Direct OpenRDF API
-
-Here is some sample code to load data (N-Triples in this sample) directly through the OpenRDF API.
-You will need at least `accumulo.rya-`, `rya.api`, `rya.sail.impl` on the classpath and transitive dependencies. I find that Maven is the easiest way to get a project dependency tree set up.
-
-``` JAVA
-final RdfCloudTripleStore store = new RdfCloudTripleStore();
-AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-AccumuloRyaDAO dao = new AccumuloRyaDAO();
-Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password");
-dao.setConnector(connector);
-conf.setTablePrefix("rya_");
-dao.setConf(conf);
-store.setRdfDao(dao);
-
-Repository myRepository = new RyaSailRepository(store);
-myRepository.initialize();
-RepositoryConnection conn = myRepository.getConnection();
-
-//load data from file
-final File file = new File("ntriples.ntrips");
-conn.add(new FileInputStream(file), file.getName(),
-    RDFFormat.NTRIPLES, new Resource[]{});
-
-conn.commit();
-
-conn.close();
-myRepository.shutDown();
-```
\ No newline at end of file
diff --git a/extras/rya.manual/src/site/markdown/overview.md b/extras/rya.manual/src/site/markdown/overview.md
deleted file mode 100644
index 068bd57d2..000000000
--- a/extras/rya.manual/src/site/markdown/overview.md
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-# Overview
-
-RYA is a scalable RDF Store that is built on top of a Columnar Index Store (such as Accumulo). It is implemented as an extension to OpenRdf to provide easy query mechanisms (SPARQL, SERQL, etc) and Rdf data storage (RDF/XML, NTriples, etc).
-
-RYA stands for RDF y(and) Accumulo.
diff --git a/extras/rya.manual/src/site/markdown/querydata.md b/extras/rya.manual/src/site/markdown/querydata.md
deleted file mode 100644
index a7e2a6d3b..000000000
--- a/extras/rya.manual/src/site/markdown/querydata.md
+++ /dev/null
@@ -1,137 +0,0 @@
-
-
-# Query Data
-
-There are a few mechanisms to query data.
-
-## Web JSP endpoint
-
-Open a url to `http://server/web.rya/sparqlQuery.jsp`. This simple form can run Sparql.
-
-## Web REST endpoint
-
-The War sets up a Web REST endpoint at `http://server/web.rya/queryrdf` that allows GET requests with queries.
-
-For this sample, we will assume you already loaded data from the [Load Data](loaddata.md) tutorial.
-
-Use the following Java code to query data through the REST endpoint:
-
-``` JAVA
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLEncoder;
-
-public class QueryDataServletRun {
-
-    public static void main(String[] args) {
-        try {
-            String query = "select * where {\n" +
-                "     ?p ?o.\n" +
-                "}";
-
-            String queryenc = URLEncoder.encode(query, "UTF-8");
-
-            URL url = new URL("http://server/rdfTripleStore/queryrdf?query=" + queryenc);
-            URLConnection urlConnection = url.openConnection();
-            urlConnection.setDoOutput(true);
-
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                    urlConnection.getInputStream()));
-            String line;
-            while ((line = rd.readLine()) != null) {
-                System.out.println(line);
-            }
-            rd.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}
-```
-
-Compile and run the code above, changing the url to the one your Rdf War is running at.
-
-## Direct Code
-
-Here is a code snippet for running directly against Accumulo. You will need at least accumulo.rya.jar, rya.api, rya.sail.impl on the classpath and transitive dependencies. I find that Maven is the easiest way to get a project dependency tree set up.
-
-``` JAVA
-Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password");
-
-final RdfCloudTripleStore store = new RdfCloudTripleStore();
-AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
-crdfdao.setConnector(connector);
-
-AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-conf.setTablePrefix("rts_");
-conf.setDisplayQueryPlan(true);
-crdfdao.setConf(conf);
-store.setRdfDao(crdfdao);
-
-ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(connector, conf);
-evalDao.init();
-store.setRdfEvalStatsDAO(evalDao);
-
-InferenceEngine inferenceEngine = new InferenceEngine();
-inferenceEngine.setRdfDao(crdfdao);
-inferenceEngine.setConf(conf);
-store.setInferenceEngine(inferenceEngine);
-
-Repository myRepository = new RyaSailRepository(store);
-myRepository.initialize();
-
-String query = "select * where {\n" +
-    "     ?p ?o.\n" +
-    "}";
-RepositoryConnection conn = myRepository.getConnection();
-System.out.println(query);
-TupleQuery tupleQuery = conn.prepareTupleQuery(
-        QueryLanguage.SPARQL, query);
-ValueFactory vf = ValueFactoryImpl.getInstance();
-
-TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out);
-tupleQuery.evaluate(new TupleQueryResultHandler() {
-
-    int count = 0;
-
-    @Override
-    public void startQueryResult(List strings) throws TupleQueryResultHandlerException {
-    }
-
-    @Override
-    public void endQueryResult() throws TupleQueryResultHandlerException {
-    }
-
-    @Override
-    public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-        System.out.println(bindingSet);
-    }
-});
-
-conn.close();
-myRepository.shutDown();
-```
-
diff --git a/extras/rya.manual/src/site/markdown/quickstart.md b/extras/rya.manual/src/site/markdown/quickstart.md
deleted file mode 100644
index 4f0aa05ee..000000000
--- a/extras/rya.manual/src/site/markdown/quickstart.md
+++ /dev/null
@@ -1,62 +0,0 @@
-
-
-# Quick Start
-
-This tutorial will outline the steps needed to get quickly started with the Rya store using the web based endpoint.
- -## Prerequisites - -* Columnar Store (Accumulo) -* Rya code (Git: git://git.apache.org/incubator-rya.git) -* Maven 3.0 + - -## Building from Source - -Using Git, pull down the latest code from the url above. - -Run the command to build the code `mvn clean install` - -If all goes well, the build should be successful and a war should be produced in `web/web.rya/target/web.rya.war` - -## Deployment Using Tomcat - -Unwar the above war into the webapps directory. - -To point the web.rya war to the appropriate Accumulo instance, make a properties file `environment.properties` and put it in the classpath. Here is an example: - -``` -instance.name=accumulo #Accumulo instance name -instance.zk=localhost:2181 #Accumulo Zookeepers -instance.username=root #Accumulo username -instance.password=secret #Accumulo pwd -rya.tableprefix=triplestore_ #Rya Table Prefix -rya.displayqueryplan=true #To display the query plan -``` - -Start the Tomcat server. `./bin/startup.sh` - -## Usage - -First, we need to load data. See the [Load Data Section] (loaddata.md) - -Second, we need to query that data. See the [Query Data Section](querydata.md) - diff --git a/extras/rya.manual/src/site/markdown/sm-addauth.md b/extras/rya.manual/src/site/markdown/sm-addauth.md deleted file mode 100644 index 2f32422c7..000000000 --- a/extras/rya.manual/src/site/markdown/sm-addauth.md +++ /dev/null @@ -1,119 +0,0 @@ - - -# Add Authentication - -This tutorial will give a few examples on how to load and query data with authentication. - -This is only available for accumulo and Accumulo because they provide the security filters necessary to do row level authentication and visibility. - -## Load Data with Visibilities - -During the Load process, there are a few ways to set the Column Visibility you want set on each of the corresponding rdf rows. - -### Global Visibility - -You can set the Column Visibility globally on the RdfCloudTripleStore, and it will use that particular value for every row saved. - -To do this, once you create and set up the RdfCloudTripleStore, just set the property on the store configuration: - -``` JAVA -//setup -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); -crdfdao.setConnector(connector); - -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -conf.setTablePrefix("rts_"); -conf.setDisplayQueryPlan(true); - -//set global column Visibility -conf.setCv("AUTH1|AUTH2"); - -crdfdao.setConf(conf); -store.setRdfDao(crdfdao); -``` - -The format is simply the same as the Column Visibility format. - -### Per triple or document based Visibility - -TODO: Not available as of yet - -## Query Data with Authentication - -Attaching an Authentication to the query process is very simple. 
It requires just adding the property `RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH` to the query `BindingSet` -Example: - -``` JAVA -//setup -Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password"); -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); -crdfdao.setConnector(connector); - -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -conf.setTablePrefix("rts_"); -conf.setDisplayQueryPlan(true); -crdfdao.setConf(conf); -//set global column Visibility -conf.setCv("1|2"); -store.setRdfDao(crdfdao); - -InferenceEngine inferenceEngine = new InferenceEngine(); -inferenceEngine.setRdfDao(crdfdao); -inferenceEngine.setConf(conf); -store.setInferenceEngine(inferenceEngine); - -Repository myRepository = new RyaSailRepository(store); -myRepository.initialize(); -RepositoryConnection conn = myRepository.getConnection(); - -//define and add statement -String litdupsNS = "urn:test:litdups#"; -URI cpu = vf.createURI(litdupsNS, "cpu"); -URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -URI uri1 = vf.createURI(litdupsNS, "uri1"); -conn.add(cpu, loadPerc, uri1); -conn.commit(); - -//query with auth -String query = "select * where {" + - "<" + cpu.toString() + "> ?p ?o1." + - "}"; -TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2")); -TupleQueryResult result = tupleQuery.evaluate(); -while(result.hasNext()) { - System.out.println(result.next()); -} -result.close(); - -//close -conn.close(); -myRepository.shutDown(); -``` - -Or you can set a global auth using the configuration: - -``` JAVA -conf.setAuth("2") -``` \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/sm-firststeps.md b/extras/rya.manual/src/site/markdown/sm-firststeps.md deleted file mode 100644 index 34f995b22..000000000 --- a/extras/rya.manual/src/site/markdown/sm-firststeps.md +++ /dev/null @@ -1,80 +0,0 @@ - - -# Typical First Steps - -In this tutorial, I will give you a quick overview of some of the first steps I perform to get data loaded and read for query. - -## Prerequisites - - We are assuming Accumulo 1.5+ usage here. - - * Rya Source Code `web.rya.war`) - * Accumulo on top of Hadoop 0.20+ - * RDF Data (in N-Triples format, this format is the easiest to bulk load) - -## Building Source - -Skip this section if you already have the Map Reduce artifact and the WAR - -See the [Build From Source Section](build-source.md) to get the appropriate artifacts built - -## Load Data - -I find that the best way to load the data is through the Bulk Load Map Reduce job. - -* Save the RDF Data above onto HDFS. From now on we will refer to this location as `` -* Move the `accumulo.rya--job.jar` onto the hadoop cluster -* Bulk load the data. Here is a sample command line: - -``` -hadoop jar ../accumulo.rya-2.0.0-SNAPSHOT-job.jar BulkNtripsInputTool -Drdf.tablePrefix=lubm_ -Dcb.username=user -Dcb.pwd=cbpwd -Dcb.instance=instance -Dcb.zk=zookeeperLocation -Drdf.format=N-Triples -``` - -Once the data is loaded, it is actually a good practice to compact your tables. You can do this by opening the accumulo shell `shell` and running the `compact` command on the generated tables. Remember the generated tables will be prefixed by the `rdf.tablePrefix` property you assigned above. The default tablePrefix is `rts`. 
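
The compaction can also be triggered from Java rather than from the shell. A rough sketch using Accumulo's `TableOperations` API, assuming an existing `Connector` and the `lubm_` prefix carried over from the bulk-load command above:

``` JAVA
// Compact each generated Rya table (the "lubm_" prefix is an assumption
// from the bulk load example; substitute your own rdf.tablePrefix).
// The compact call throws checked Accumulo exceptions that must be handled.
for (String suffix : new String[]{"spo", "po", "osp"}) {
    // null start/end rows compact the whole table; flush first, block until done
    connector.tableOperations().compact("lubm_" + suffix, null, null, true, true);
}
```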
- -Here is a sample accumulo shell command: - -``` -compact -p lubm_(.*) -``` - -See the [Load Data Section](loaddata.md) for more options on loading rdf data - -## Run the Statistics Optimizer - -For the best query performance, it is recommended to run the Statistics Optimizer to create the Evaluation Statistics table. This job will read through your data and gather statistics on the distribution of the dataset. This table is then queried before query execution to reorder queries based on the data distribution. - -See the [Evaluation Statistics Table Section](eval.md) on how to do this. - -## Query data - -I find the easiest way to query is just to use the WAR. Load the WAR into your favorite web application container and go to the sparqlQuery.jsp page. Example: - -``` -http://localhost:8080/web.rya/sparqlQuery.jsp -``` - -This page provides a very simple text box for running queries against the store and getting data back. (SPARQL queries) - -Remember to update the connection information in the WAR: `WEB-INF/spring/spring-accumulo.xml` - -See the [Query data section](querydata.md) for more information. \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/sm-infer.md b/extras/rya.manual/src/site/markdown/sm-infer.md deleted file mode 100644 index a2b0b66ce..000000000 --- a/extras/rya.manual/src/site/markdown/sm-infer.md +++ /dev/null @@ -1,353 +0,0 @@ - - -# Inferencing - -Rya currently provides simple inferencing. The supported list of inferred relationships include: - -- rdfs:subClassOf -- rdfs:subPropertyOf -- owl:EquivalentProperty -- owl:inverseOf -- owl:SymmetricProperty -- owl:TransitiveProperty (This is currently in beta and will not work for every case) -- owl:sameAs - -## Setup - -The Inferencing Engine is a scheduled job that runs by default every 5 minutes, this is configurable, to query the relationships in the store and develop the inferred graphs necessary to answer inferencing questions. - -This also means that if you load a model into the store, it could take up to 5 minutes for the inferred relationships to be available. - -As usual you will need to set up your `RdfCloudTripleStore` with the correct DAO, notice we add an `InferencingEngine` as well to the store. 
If this is not added, then no inferencing will be done on the queries: - -``` JAVA -//setup -Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password"); -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); -crdfdao.setConnector(connector); - -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -conf.setTablePrefix("rts_"); -conf.setDisplayQueryPlan(true); -crdfdao.setConf(conf); -store.setRdfDao(crdfdao); - -ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(connector, conf); -evalDao.init(); -store.setRdfEvalStatsDAO(evalDao); - -InferenceEngine inferenceEngine = new InferenceEngine(); -inferenceEngine.setRdfDao(crdfdao); -inferenceEngine.setConf(conf); -store.setInferenceEngine(inferenceEngine); - -Repository myRepository = new RyaSailRepository(store); -myRepository.initialize(); -RepositoryConnection conn = myRepository.getConnection(); - -//query code goes here - -//close -conn.close(); -myRepository.shutDown(); -``` - -## Samples - -We will go through some quick samples on loading inferred relationships, seeing and diagnosing the query plan, and checking the data - -### Rdfs:SubClassOf - -First the code, which will load the following subclassof relationship: `UndergraduateStudent subclassof Student subclassof Person`. Then we will load into the tables three triples defining `UgradA rdf:type UndergraduateStudent, StudentB rdf:type Student, PersonC rdf:type Person` - -``` JAVA -conn.add(new StatementImpl(vf.createURI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Student"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Person"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), RDF.TYPE, vf.createURI(litdupsNS, "UndergraduateStudent"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB"), RDF.TYPE, vf.createURI(litdupsNS, "Student"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "PersonC"), RDF.TYPE, vf.createURI(litdupsNS, "Person"))); -conn.commit(); -``` - -Remember that once the model is committed, it may take up to 5 minutes for the inferred relationships to be ready. Though you can override this property in the `InferencingEngine`. - -We shall run the following query: - -``` -PREFIX rdfs: -PREFIX rdf: -PREFIX lit: -select * where {?s rdf:type lit:Person.} -``` - -And should get back the following results: - -``` -[s=urn:test:litdups#StudentB] -[s=urn:test:litdups#PersonC] -[s=urn:test:litdups#UgradA] -``` - -#### How it works - -Let us look at the query plan: - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "s" - Join - FixedStatementPattern - Var (name=79f261ee-e930-4af1-bc09-e637cc0affef) - Var (name=c-79f261ee-e930-4af1-bc09-e637cc0affef, value=http://www.w3.org/2000/01/rdf-schema#subClassOf) - Var (name=-const-2, value=urn:test:litdups#Person, anonymous) - DoNotExpandSP - Var (name=s) - Var (name=-const-1, value=http://www.w3.org/1999/02/22-rdf-syntax-ns#type, anonymous) - Var (name=79f261ee-e930-4af1-bc09-e637cc0affef) -``` - -Basically, we first find out (through the InferencingEngine) what triples have subclassof with Person. The InferencingEngine will do the graph analysis to find the both Student and UndergraduateStudent are Person classes. -Then this information is joined with the statement pattern `(?s rdf:type ?inf)` where `?inf` is the results from the InferencingEngine. 
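
As mentioned above, the inferred graph is rebuilt on a schedule, so a freshly committed model may not be visible right away. A minimal sketch of forcing an immediate rebuild with the `refreshGraph` method noted earlier, assuming the `conn` and `inferenceEngine` objects from the setup code above:

``` JAVA
// Commit new schema triples, then rebuild the inferred graph immediately
// instead of waiting for the scheduled refresh (refreshGraph may throw an
// exception that the caller must handle).
conn.commit();
inferenceEngine.refreshGraph();
```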
- -### Rdfs:SubPropertyOf - -SubPropertyOf defines that a property can be an instance of another property. For example, a `gradDegreeFrom subPropertyOf degreeFrom`. - -Also, EquivalentProperty can be thought of as specialized SubPropertyOf relationship where if `propA equivalentProperty propB` then that means that `propA subPropertyOf propB AND propB subPropertyOf propA` - -Sample Code: - -``` JAVA -conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "memberOf"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "associatedWith"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "gradDegreeFrom"), vf.createURI(litdupsNS, "Yale"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "ProfessorC"), vf.createURI(litdupsNS, "memberOf"), vf.createURI(litdupsNS, "Harvard"))); -conn.commit(); -``` - -With query: - -``` -PREFIX rdfs: -PREFIX rdf: -PREFIX lit: -select * where {?s lit:memberOf lit:Harvard.} -``` - -Will return results: - -``` -[s=urn:test:litdups#UgradA] -[s=urn:test:litdups#ProfessorC] -``` - -Since UgradA has undergraduateDegreeFrom Harvard and ProfessorC is memberOf Harvard. - -#### How it works - -This is very similar to the subClassOf relationship above. Basically the InferencingEngine provides what properties are subPropertyOf relationships with memberOf, and the second part of the Join checks to see if those properties are predicates with object "Harvard". - -Query Plan: - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "s" - Join - FixedStatementPattern - Var (name=0bad69f3-4769-4293-8318-e828b23dc52a) - Var (name=c-0bad69f3-4769-4293-8318-e828b23dc52a, value=http://www.w3.org/2000/01/rdf-schema#subPropertyOf) - Var (name=-const-1, value=urn:test:litdups#memberOf, anonymous) - DoNotExpandSP - Var (name=s) - Var (name=0bad69f3-4769-4293-8318-e828b23dc52a) - Var (name=-const-2, value=urn:test:litdups#Harvard, anonymous) -``` - -### InverseOf - -InverseOf defines a property that is an inverse relation of another property. For example, a student who has a `degreeFrom` a University also means that the University `hasAlumnus` student. 
- -Code: - -``` JAVA -conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC"))); -conn.commit(); -``` - -Query: - -``` -PREFIX rdfs: -PREFIX rdf: -PREFIX lit: -select * where {lit:Harvard lit:hasAlumnus ?s.} -``` - -Result: - -``` -[s=urn:test:litdups#AlumC] -[s=urn:test:litdups#GradB] -[s=urn:test:litdups#UgradA] -``` - -#### How it works - -The query planner will expand the statement pattern `Harvard hasAlumnus ?s` to a Union between `Harvard hasAlumnus ?s. and ?s degreeFrom Harvard` - -As a caveat, it is important to note that in general Union queries do not have the best performance, so having a property that has an inverseOf and subPropertyOf, could cause a query plan that might take long depending on how the query planner orders the joins. - -Query Plan - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "s" - InferUnion - StatementPattern - Var (name=-const-1, value=urn:test:litdups#Harvard, anonymous) - Var (name=-const-2, value=urn:test:litdups#hasAlumnus, anonymous) - Var (name=s) - StatementPattern - Var (name=s) - Var (name=-const-2, value=urn:test:litdups#degreeFrom) - Var (name=-const-1, value=urn:test:litdups#Harvard, anonymous) -``` - -### SymmetricProperty - -SymmetricProperty defines a relationship where, for example, if Bob is a friendOf Jeff, then Jeff is a friendOf Bob. (Hopefully) - -Code: - -``` JAVA -conn.add(new StatementImpl(vf.createURI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY)); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "Bob"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "James"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff"))); -conn.commit(); -``` - -Query: - -``` -PREFIX rdfs: -PREFIX rdf: -PREFIX lit: -select * where {?s lit:friendOf lit:Bob.} -``` - -Results: - -``` -[s=urn:test:litdups#Jeff] -``` - -#### How it works - -The query planner will recognize that `friendOf` is a SymmetricProperty and devise a Union to find the specified relationship and inverse. - -Query Plan: - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "s" - InferUnion - StatementPattern - Var (name=s) - Var (name=-const-1, value=urn:test:litdups#friendOf, anonymous) - Var (name=-const-2, value=urn:test:litdups#Bob, anonymous) - StatementPattern - Var (name=-const-2, value=urn:test:litdups#Bob, anonymous) - Var (name=-const-1, value=urn:test:litdups#friendOf, anonymous) - Var (name=s) -``` - -### TransitiveProperty - -TransitiveProperty provides a transitive relationship between resources. For example, if Queens is subRegionOf NYC and NYC is subRegionOf NY, then Queens is transitively a subRegionOf NY. 
- -Code: - -``` JAVA -conn.add(new StatementImpl(vf.createURI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY)); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "Queens"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NYC"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "NYC"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NY"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "NY"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "US"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "US"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NorthAmerica"))); -conn.add(new StatementImpl(vf.createURI(litdupsNS, "NorthAmerica"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "World"))); -conn.commit(); -``` - -Query: - -``` -PREFIX rdfs: -PREFIX rdf: -PREFIX lit: -select * where {?s lit:subRegionOf lit:NorthAmerica.} -``` - -Results: - -``` -[s=urn:test:litdups#Queens] -[s=urn:test:litdups#NYC] -[s=urn:test:litdups#NY] -[s=urn:test:litdups#US] -``` - -#### How it works - -The TransitiveProperty relationship works by running recursive queries till all the results are returned. - -It is important to note that certain TransitiveProperty relationships will not work: -* Open ended property: ?s subRegionOf ?o (At least one of the properties must be filled or will be filled as the query gets answered) -* Closed property: Queens subRegionOf NY (At least one of the properties must be empty) - -We are working on fixing these issues. - -Query Plan: - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "s" - TransitivePropertySP - Var (name=s) - Var (name=-const-1, value=urn:test:litdups#subRegionOf, anonymous) - Var (name=-const-2, value=urn:test:litdups#NorthAmerica, anonymous) -``` \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/sm-namedgraph.md b/extras/rya.manual/src/site/markdown/sm-namedgraph.md deleted file mode 100644 index 68263450d..000000000 --- a/extras/rya.manual/src/site/markdown/sm-namedgraph.md +++ /dev/null @@ -1,157 +0,0 @@ - - -# Named Graphs - -Named graphs are supported simply in the Rdf Store in a few ways. OpenRdf supports sending `contexts` as each triple is saved. - -## Simple Named Graph Load and Query - -Here is a very simple example of using the API to Insert data in named graphs and querying with Sparql - -First we will define a Trig document to load -Trig document - -``` -@prefix rdf: . -@prefix xsd: . -@prefix swp: . -@prefix dc: . -@prefix ex: . -@prefix : . -:G1 { :Monica ex:name "Monica Murphy" . - :Monica ex:homepage . - :Monica ex:email . - :Monica ex:hasSkill ex:Management } - -:G2 { :Monica rdf:type ex:Person . - :Monica ex:hasSkill ex:Programming } - -:G4 { :Phobe ex:name "Phobe Buffet" } - -:G3 { :G1 swp:assertedBy _:w1 . - _:w1 swp:authority :Chris . - _:w1 dc:date "2003-10-02"^^xsd:date . - :G2 swp:quotedBy _:w2 . - :G4 swp:assertedBy _:w2 . - _:w2 dc:date "2003-09-03"^^xsd:date . - _:w2 swp:authority :Tom . - :Chris rdf:type ex:Person . - :Chris ex:email . - :Tom rdf:type ex:Person . - :Tom ex:email } -``` - -We will assume that this file is saved on your classpath somewhere at `` - -Load data through API: - -``` JAVA -InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig"); -RepositoryConnection conn = repository.getConnection(); -conn.add(stream, "", RDFFormat.TRIG); -conn.commit(); -``` - -Now that the data is loaded we can easily query it. 
For example, we will query to find what `hasSkill` is defined in graph G2, and relate that to someone defined in G1. - -**Query:** - -``` -PREFIX ex: -PREFIX voc: -PREFIX foaf: -PREFIX rdfs: - -SELECT * -WHERE -{ - GRAPH ex:G1 - { - ?m voc:name ?name ; - voc:homepage ?hp . - } . - GRAPH ex:G2 - { - ?m voc:hasSkill ?skill . - } . -} -``` - -**Results:** - -``` -[hp=http://www.monicamurphy.org;m=http://www.example.org/exampleDocument#Monica;skill=http://www.example.org/vocabulary#Programming;name="Monica Murphy"] -``` - -**Here is the Query Plan as well:** - -``` -QueryRoot - Projection - ProjectionElemList - ProjectionElem "m" - ProjectionElem "name" - ProjectionElem "hp" - ProjectionElem "skill" - Join - Join - StatementPattern FROM NAMED CONTEXT - Var (name=m) - Var (name=-const-2, value=http://www.example.org/vocabulary#name, anonymous) - Var (name=name) - Var (name=-const-1, value=http://www.example.org/exampleDocument#G1, anonymous) - StatementPattern FROM NAMED CONTEXT - Var (name=m) - Var (name=-const-3, value=http://www.example.org/vocabulary#homepage, anonymous) - Var (name=hp) - Var (name=-const-1, value=http://www.example.org/exampleDocument#G1, anonymous) - StatementPattern FROM NAMED CONTEXT - Var (name=m) - Var (name=-const-5, value=http://www.example.org/vocabulary#hasSkill, anonymous) - Var (name=skill) - Var (name=-const-4, value=http://www.example.org/exampleDocument#G2, anonymous) -``` - -## Inserting named graph data through Sparql - -The new Sparql update standard provides another way to insert data, even into named graphs. - -First the insert update: - -``` -PREFIX dc: -PREFIX ex: -INSERT DATA -{ - GRAPH ex:G1 { - dc:title "A new book" ; - dc:creator "A.N.Other" . - } -} -``` - -To perform this update, it requires different code than querying the data directly: - -``` -Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); -update.execute(); -``` \ No newline at end of file diff --git a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md b/extras/rya.manual/src/site/markdown/sm-simpleaqr.md deleted file mode 100644 index cb8f0685f..000000000 --- a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md +++ /dev/null @@ -1,75 +0,0 @@ - - -# Simple Add Query and Remove of Statements - -This quick tutorial will give a small example on how to add, query, and remove statements from Rya - -## Code - -``` JAVA -//setup -Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password"); -final RdfCloudTripleStore store = new RdfCloudTripleStore(); -AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); -crdfdao.setConnector(connector); - -AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -conf.setTablePrefix("rts_"); -conf.setDisplayQueryPlan(true); -crdfdao.setConf(conf); -store.setRdfDao(crdfdao); - -ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(connector, conf); -evalDao.init(); -store.setRdfEvalStatsDAO(evalDao); - -InferenceEngine inferenceEngine = new InferenceEngine(); -inferenceEngine.setRdfDao(crdfdao); -inferenceEngine.setConf(conf); -store.setInferenceEngine(inferenceEngine); - -Repository myRepository = new RyaSailRepository(store); -myRepository.initialize(); -RepositoryConnection conn = myRepository.getConnection(); - -//define and add statement -String litdupsNS = "urn:test:litdups#"; -URI cpu = vf.createURI(litdupsNS, "cpu"); -URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -URI uri1 = vf.createURI(litdupsNS, "uri1"); -conn.add(cpu, loadPerc, uri1); -conn.commit(); - -//query 
for all statements that have subject=cpu and pred=loadPerc (wildcard object)
-RepositoryResult result = conn.getStatements(cpu, loadPerc, null, true);
-while(result.hasNext()) {
-    System.out.println(result.next());
-}
-result.close();
-
-//remove statement
-conn.remove(cpu, loadPerc, uri1);
-
-//close
-conn.close();
-myRepository.shutDown();
-```
\ No newline at end of file
diff --git a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md b/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
deleted file mode 100644
index 639ca0240..000000000
--- a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
+++ /dev/null
@@ -1,79 +0,0 @@
-
-
-# Sparql Query
-
-This quick tutorial will give a small example on how to query data with SPARQL.
-
-## Code
-
-``` JAVA
-//setup
-Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password");
-final RdfCloudTripleStore store = new RdfCloudTripleStore();
-AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
-crdfdao.setConnector(connector);
-
-AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-conf.setTablePrefix("rts_");
-conf.setDisplayQueryPlan(true);
-crdfdao.setConf(conf);
-store.setRdfDao(crdfdao);
-
-ProspectorServiceEvalStatsDAO evalDao = new ProspectorServiceEvalStatsDAO(connector, conf);
-evalDao.init();
-store.setRdfEvalStatsDAO(evalDao);
-
-InferenceEngine inferenceEngine = new InferenceEngine();
-inferenceEngine.setRdfDao(crdfdao);
-inferenceEngine.setConf(conf);
-store.setInferenceEngine(inferenceEngine);
-
-Repository myRepository = new RyaSailRepository(store);
-myRepository.initialize();
-RepositoryConnection conn = myRepository.getConnection();
-ValueFactory vf = myRepository.getValueFactory();
-
-//define and add statements
-String litdupsNS = "urn:test:litdups#";
-URI cpu = vf.createURI(litdupsNS, "cpu");
-URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-URI uri1 = vf.createURI(litdupsNS, "uri1");
-URI pred2 = vf.createURI(litdupsNS, "pred2");
-URI uri2 = vf.createURI(litdupsNS, "uri2");
-conn.add(cpu, loadPerc, uri1);
-conn.add(cpu, pred2, uri2); //both statements are needed for the join below to return results
-conn.commit();
-
-//query using sparql
-String query = "select * where {" +
-    "?x <" + loadPerc.stringValue() + "> ?o1." +
-    "?x <" + pred2.stringValue() + "> ?o2." +
-    "}";
-TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-TupleQueryResult result = tupleQuery.evaluate();
-while(result.hasNext()) {
-    System.out.println(result.next());
-}
-result.close();
-
-//close
-conn.close();
-myRepository.shutDown();
-```
\ No newline at end of file
diff --git a/extras/rya.manual/src/site/markdown/sm-updatedata.md b/extras/rya.manual/src/site/markdown/sm-updatedata.md
deleted file mode 100644
index f0fe66472..000000000
--- a/extras/rya.manual/src/site/markdown/sm-updatedata.md
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-# Sparql Update
-
-OpenRDF supports the Sparql Update functionality. Here are a few samples:
-
-Remember, you have to use `RepositoryConnection.prepareUpdate(..)` to perform these updates.
-
-**Insert:**
-
-```
-PREFIX dc:
-INSERT DATA
-{ dc:title "A new book" ;
-  dc:creator "A.N.Other" .
-}
-```
-
-**Delete:**
-
-```
-PREFIX dc:
-DELETE DATA
-{ dc:title "A new book" ;
-  dc:creator "A.N.Other" .
-}
-```
-
-**Update:**
-
-```
-PREFIX dc:
-DELETE { ?book dc:title ?title }
-INSERT { ?book dc:title "A newer book". ?book dc:add "Additional Info" }
-WHERE
-  { ?book dc:creator "A.N.Other" .
-  }
-```
-
-**Insert Named Graph:**
-
-```
-PREFIX dc:
-PREFIX ex:
-INSERT DATA
-{ GRAPH ex:G1 {
-    dc:title "A new book" ;
-    dc:creator "A.N.Other" .
-} -} -``` - -**Update Named Graph:** - -``` -PREFIX dc: -WITH -DELETE { ?book dc:title ?title } -INSERT { ?book dc:title "A newer book". - ?book dc:add "Additional Info" } -WHERE - { ?book dc:creator "A.N.Other" . - } -``` diff --git a/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js b/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js deleted file mode 100644 index 7fe883488..000000000 --- a/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -window.onload = function() { - var anchors = document.getElementsByTagName("a"); - for (var i = 0; i < anchors.length; i++) { - anchors[i].href = anchors[i].href.replace(/\.md$/,'\.html'); - } - } diff --git a/extras/rya.manual/src/site/site.xml b/extras/rya.manual/src/site/site.xml deleted file mode 100644 index a671d3db6..000000000 --- a/extras/rya.manual/src/site/site.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - org.apache.maven.skins - maven-fluido-skin - 1.4 - - - - false - true - false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/extras/rya.prospector/pom.xml b/extras/rya.prospector/pom.xml deleted file mode 100644 index a9b5c61d0..000000000 --- a/extras/rya.prospector/pom.xml +++ /dev/null @@ -1,109 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.extras - 3.2.10-SNAPSHOT - - - rya.prospector - Apache Rya Prospector - - - - org.apache.rya - rya.api - - - org.apache.rya - accumulo.rya - - - - commons-lang - commons-lang - - - com.google.guava - guava - - - org.codehaus.groovy - groovy-all - - - - org.apache.mrunit - mrunit - hadoop2 - test - - - - - - - maven-compiler-plugin - - groovy-eclipse-compiler - - - - org.codehaus.groovy - groovy-eclipse-compiler - 2.9.1-01 - - - - org.codehaus.groovy - groovy-eclipse-batch - 2.3.7-01 - - - - - org.codehaus.groovy - groovy-eclipse-compiler - 2.9.1-01 - true - - - org.apache.maven.plugins - maven-shade-plugin - - - - true - map-reduce - - - - - - - - - - diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy deleted file mode 100644 index 6017da4be..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.domain - -/** - * Date: 12/5/12 - * Time: 11:33 AM - */ -class IndexEntry { - def String index - def String data - def String dataType - def String tripleValueType - def String visibility - def Long count - def Long timestamp - - @Override - public String toString() { - return "IndexEntry{" + - "index='" + index + '\'' + - ", data='" + data + '\'' + - ", dataType='" + dataType + '\'' + - ", tripleValueType=" + tripleValueType + - ", visibility='" + visibility + '\'' + - ", timestamp='" + timestamp + '\'' + - ", count=" + count + - '}'; - } - - boolean equals(o) { - if (this.is(o)) return true - if (getClass() != o.class) return false - - IndexEntry that = (IndexEntry) o - - if (count != that.count) return false - if (timestamp != that.timestamp) return false - if (data != that.data) return false - if (dataType != that.dataType) return false - if (index != that.index) return false - if (tripleValueType != that.tripleValueType) return false - if (visibility != that.visibility) return false - - return true - } - - int hashCode() { - int result - result = (index != null ? index.hashCode() : 0) - result = 31 * result + (data != null ? data.hashCode() : 0) - result = 31 * result + (dataType != null ? dataType.hashCode() : 0) - result = 31 * result + (tripleValueType != null ? tripleValueType.hashCode() : 0) - result = 31 * result + (visibility != null ? visibility.hashCode() : 0) - result = 31 * result + (count != null ? count.hashCode() : 0) - result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0) - return result - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy deleted file mode 100644 index fadf6e803..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package mvm.rya.prospector.domain - -import org.apache.hadoop.io.WritableComparable - -import static mvm.rya.prospector.domain.TripleValueType.* - -/** - * Date: 12/3/12 - * Time: 11:15 AM - */ -class IntermediateProspect implements WritableComparable { - - def String index - def String data - def String dataType - def TripleValueType tripleValueType - def String visibility - - @Override - int compareTo(IntermediateProspect t) { - if(!index.equals(t.index)) - return index.compareTo(t.index); - if(!data.equals(t.data)) - return data.compareTo(t.data); - if(!dataType.equals(t.dataType)) - return dataType.compareTo(t.dataType); - if(!tripleValueType.equals(t.tripleValueType)) - return tripleValueType.compareTo(t.tripleValueType); - if(!visibility.equals(t.visibility)) - return visibility.compareTo(t.visibility); - return 0 - } - - @Override - void write(DataOutput dataOutput) { - dataOutput.writeUTF(index); - dataOutput.writeUTF(data); - dataOutput.writeUTF(dataType); - dataOutput.writeUTF(tripleValueType.name()); - dataOutput.writeUTF(visibility); - } - - @Override - void readFields(DataInput dataInput) { - index = dataInput.readUTF() - data = dataInput.readUTF() - dataType = dataInput.readUTF() - tripleValueType = TripleValueType.valueOf(dataInput.readUTF()) - visibility = dataInput.readUTF() - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java deleted file mode 100644 index 183b0d246..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java +++ /dev/null @@ -1,26 +0,0 @@ -package mvm.rya.prospector.domain; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -public enum TripleValueType { - - subject, predicate, object, entity, subjectpredicate, predicateobject, subjectobject -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy deleted file mode 100644 index 6c4a05594..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.mr - -import mvm.rya.prospector.utils.ProspectorUtils -import org.apache.accumulo.core.data.Mutation -import org.apache.accumulo.core.data.Value -import org.apache.accumulo.core.security.ColumnVisibility -import org.apache.hadoop.conf.Configured -import org.apache.hadoop.util.Tool -import org.apache.hadoop.util.ToolRunner -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.fs.Path -import org.apache.hadoop.mapreduce.Job - -import org.apache.hadoop.io.LongWritable -import org.apache.commons.lang.time.DateUtils - -import mvm.rya.prospector.domain.IntermediateProspect - -import com.google.common.collect.Lists - -import static mvm.rya.prospector.utils.ProspectorConstants.* -import static mvm.rya.prospector.utils.ProspectorUtils.* - -/** - * Date: 12/3/12 - * Time: 10:57 AM - */ -class Prospector extends Configured implements Tool { - - private static long NOW = System.currentTimeMillis(); - - private Date truncatedDate; - - public static void main(String[] args) { - int res = ToolRunner.run(new Prospector(), args); - System.exit(res); - } - - @Override - int run(String[] args) { - Configuration conf = getConf(); - - truncatedDate = DateUtils.truncate(new Date(NOW), Calendar.MINUTE); - - Path configurationPath = new Path(args[0]); - conf.addResource(configurationPath); - - def inTable = conf.get("prospector.intable") - def outTable = conf.get("prospector.outtable") - def auths_str = conf.get("prospector.auths") - assert inTable != null - assert outTable != null - assert auths_str != null - - Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - - String[] auths = auths_str.split(",") - ProspectorUtils.initMRJob(job, inTable, outTable, auths) - - job.getConfiguration().setLong("DATE", NOW); - - def performant = conf.get(PERFORMANT) - if (Boolean.parseBoolean(performant)) { - /** - * Apply some performance tuning - */ - ProspectorUtils.addMRPerformance(job.configuration) - } - - job.setMapOutputKeyClass(IntermediateProspect.class); - job.setMapOutputValueClass(LongWritable.class); - - job.setMapperClass(ProspectorMapper.class); - job.setCombinerClass(ProspectorCombiner.class); - job.setReducerClass(ProspectorReducer.class); - job.waitForCompletion(true); - - int success = job.isSuccessful() ? 
0 : 1; - - if (success == 0) { - Mutation m = new Mutation(METADATA) - m.put(PROSPECT_TIME, getReverseIndexDateTime(truncatedDate), new ColumnVisibility(DEFAULT_VIS), truncatedDate.time, new Value(EMPTY)) - writeMutations(connector(instance(conf), conf), outTable, [m]) - } - - return success - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy deleted file mode 100644 index fe1c5b27e..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.mr - -import mvm.rya.prospector.plans.IndexWorkPlan -import mvm.rya.prospector.plans.IndexWorkPlanManager -import mvm.rya.prospector.plans.impl.ServicesBackedIndexWorkPlanManager -import org.apache.commons.lang.time.DateUtils -import org.apache.hadoop.mapreduce.Reducer -import mvm.rya.prospector.utils.ProspectorUtils - -/** - * Date: 12/3/12 - * Time: 11:06 AM - */ -class ProspectorCombiner extends Reducer { - - private Date truncatedDate; - private IndexWorkPlanManager manager = new ServicesBackedIndexWorkPlanManager() - Map plans - - @Override - public void setup(Reducer.Context context) throws IOException, InterruptedException { - super.setup(context); - - long now = context.getConfiguration().getLong("DATE", System.currentTimeMillis()); - truncatedDate = DateUtils.truncate(new Date(now), Calendar.MINUTE); - - this.plans = ProspectorUtils.planMap(manager.plans) - } - - @Override - protected void reduce(def prospect, Iterable values, Reducer.Context context) { - def plan = plans.get(prospect.index) - if (plan != null) { - def coll = plan.combine(prospect, values) - if (coll != null) { - coll.each { entry -> - context.write(entry.key, entry.value) - } - } - } - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy deleted file mode 100644 index 18fa32b7e..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
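
The Prospector tool above reads its table names and connection settings from the Hadoop configuration file passed as its first argument. Below is a minimal sketch of such a file plus a launch command; every value shown (table names, credentials, paths) is illustrative rather than taken from this patch. The property keys themselves come from the tool's run method and from ProspectorUtils.initMRJob further down.

```
<!-- prospector-conf.xml (all values hypothetical) -->
<configuration>
    <property><name>prospector.intable</name><value>rya_spo</value></property>
    <property><name>prospector.outtable</name><value>rya_prospects</value></property>
    <property><name>prospector.auths</name><value>U</value></property>
    <property><name>instance</name><value>accumulo</value></property>
    <property><name>zookeepers</name><value>localhost:2181</value></property>
    <property><name>username</name><value>root</value></property>
    <property><name>password</name><value>secret</value></property>
</configuration>
```

The job would then be launched with the shaded jar produced by the pom's map-reduce classifier, e.g. `hadoop jar rya.prospector-3.2.10-SNAPSHOT-map-reduce.jar mvm.rya.prospector.mr.Prospector /path/to/prospector-conf.xml`.
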
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.mr - -import mvm.rya.accumulo.AccumuloRdfConfiguration -import mvm.rya.api.RdfCloudTripleStoreConstants -import mvm.rya.api.domain.RyaStatement -import mvm.rya.api.resolver.RyaTripleContext -import mvm.rya.api.resolver.triple.TripleRow -import mvm.rya.prospector.plans.IndexWorkPlan -import mvm.rya.prospector.plans.IndexWorkPlanManager -import mvm.rya.prospector.plans.impl.ServicesBackedIndexWorkPlanManager - -import org.apache.commons.lang.time.DateUtils -import org.apache.hadoop.mapreduce.Mapper - -/** - * Date: 12/3/12 - * Time: 11:06 AM - */ -class ProspectorMapper extends Mapper { - - private Date truncatedDate; - private RyaTripleContext ryaContext; - private IndexWorkPlanManager manager = new ServicesBackedIndexWorkPlanManager() - private Collection plans = manager.plans - - @Override - public void setup(Mapper.Context context) throws IOException, InterruptedException { - super.setup(context); - - long now = context.getConfiguration().getLong("DATE", System.currentTimeMillis()); - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(context.getConfiguration())); - truncatedDate = DateUtils.truncate(new Date(now), Calendar.MINUTE); - } - - @Override - public void map(def row, def data, Mapper.Context context) { - RyaStatement ryaStatement = ryaContext.deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, - new TripleRow( - row.row.bytes, - row.columnFamily.bytes, - row.columnQualifier.bytes, - row.timestamp, - row.columnVisibility.bytes, - data.get() - ) - ) - plans.each { plan -> - def coll = plan.map(ryaStatement) - if (coll != null) { - coll.each { entry -> - context.write(entry.key, entry.value) - } - } - } - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy deleted file mode 100644 index 8b12aae2e..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package mvm.rya.prospector.mr - -import mvm.rya.prospector.plans.IndexWorkPlan -import mvm.rya.prospector.plans.IndexWorkPlanManager -import mvm.rya.prospector.plans.impl.ServicesBackedIndexWorkPlanManager -import org.apache.commons.lang.time.DateUtils -import org.apache.hadoop.mapreduce.Reducer -import mvm.rya.prospector.utils.ProspectorUtils - -/** - * Date: 12/3/12 - * Time: 11:06 AM - */ -class ProspectorReducer extends Reducer { - - private Date truncatedDate; - private IndexWorkPlanManager manager = new ServicesBackedIndexWorkPlanManager() - Map plans - - @Override - public void setup(Reducer.Context context) throws IOException, InterruptedException { - super.setup(context); - - def conf = context.getConfiguration() - long now = conf.getLong("DATE", System.currentTimeMillis()); - truncatedDate = DateUtils.truncate(new Date(now), Calendar.MINUTE); - - this.plans = ProspectorUtils.planMap(manager.plans) - } - - @Override - protected void reduce(def prospect, Iterable values, Reducer.Context context) { - def plan = plans.get(prospect.index) - if (plan != null) { - plan.reduce(prospect, values, truncatedDate, context) - } - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy deleted file mode 100644 index d9ba71920..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
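
One detail worth calling out: the mapper, combiner, and reducer all truncate the job-wide DATE value to the minute, so every task stamps its output with one and the same prospect timestamp. A one-line illustration of that truncation (the epoch value is arbitrary):

```
import org.apache.commons.lang.time.DateUtils

long now = 1354550100123L                       // arbitrary epoch millis
Date truncated = DateUtils.truncate(new Date(now), Calendar.MINUTE)
assert truncated.time == 1354550100000L         // seconds and millis zeroed
```
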
- */
-
-package mvm.rya.prospector.plans
-
-import mvm.rya.api.domain.RyaStatement
-import mvm.rya.prospector.domain.IntermediateProspect
-import org.apache.hadoop.io.LongWritable
-import org.apache.hadoop.mapreduce.Reducer
-import org.openrdf.model.vocabulary.XMLSchema
-import mvm.rya.prospector.domain.IndexEntry
-
-/**
- * Date: 12/3/12
- * Time: 11:12 AM
- */
-public interface IndexWorkPlan {
-
-    public static final String URITYPE = XMLSchema.ANYURI.stringValue()
-    public static final LongWritable ONE = new LongWritable(1)
-    public static final String DELIM = "\u0000";
-
-    public Collection<Map.Entry<IntermediateProspect, LongWritable>> map(RyaStatement ryaStatement)
-
-    public Collection<Map.Entry<IntermediateProspect, LongWritable>> combine(IntermediateProspect prospect, Iterable<LongWritable> counts);
-
-    public void reduce(IntermediateProspect prospect, Iterable<LongWritable> counts, Date timestamp, Reducer.Context context)
-
-    public String getIndexType()
-
-    public String getCompositeValue(List<String> indices)
-
-    public List<IndexEntry> query(def connector, String tableName, List<Long> prospectTimes, String type, String index, String dataType, String[] auths)
-
-}
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
deleted file mode 100644
index 555f84ad9..000000000
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package mvm.rya.prospector.plans
-
-/**
- * Date: 12/3/12
- * Time: 11:24 AM
- */
-public interface IndexWorkPlanManager {
-
-    public Collection<IndexWorkPlan> getPlans();
-}
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
deleted file mode 100644
index 091c29533..000000000
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. 
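
The composite-value convention in the interface above amounts to joining the supplied index values with the NUL delimiter, which is what the count plan below implements. A minimal sketch (the two sample values are hypothetical):

```
def DELIM = "\u0000"                            // mirrors IndexWorkPlan.DELIM
def indices = ["http://ex/alice", "http://ex/knows"]
def composite = indices.join(DELIM)
assert composite == "http://ex/alice" + DELIM + "http://ex/knows"
```

Because the same delimiter is used in the row keys the plan writes, a composite lookup key and a stored row key line up byte for byte.
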
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.plans.impl - -import mvm.rya.api.domain.RyaStatement -import mvm.rya.prospector.domain.IndexEntry -import mvm.rya.prospector.domain.IntermediateProspect -import mvm.rya.prospector.domain.TripleValueType -import mvm.rya.prospector.plans.IndexWorkPlan -import mvm.rya.prospector.utils.CustomEntry -import mvm.rya.prospector.utils.ProspectorUtils - -import org.apache.accumulo.core.data.Mutation -import org.apache.accumulo.core.data.Range -import org.apache.accumulo.core.data.Value -import org.apache.accumulo.core.security.Authorizations -import org.apache.accumulo.core.security.ColumnVisibility -import org.apache.hadoop.io.LongWritable -import org.apache.hadoop.io.Text -import org.apache.hadoop.mapreduce.Reducer -import org.openrdf.model.util.URIUtil -import org.openrdf.model.vocabulary.XMLSchema; - -import static mvm.rya.prospector.utils.ProspectorConstants.COUNT; -import mvm.rya.api.RdfCloudTripleStoreConstants - -/** - * Date: 12/3/12 - * Time: 12:28 PM - */ -class CountPlan implements IndexWorkPlan { - - @Override - Collection> map(RyaStatement ryaStatement) { - def subject = ryaStatement.getSubject() - def predicate = ryaStatement.getPredicate() - def subjpred = ryaStatement.getSubject().data + DELIM + ryaStatement.getPredicate().data - def predobj = ryaStatement.getPredicate().data + DELIM + ryaStatement.getObject().data - def subjobj = ryaStatement.getSubject().data + DELIM + ryaStatement.getObject().data - def object = ryaStatement.getObject() - def localIndex = URIUtil.getLocalNameIndex(subject.data) - def namespace = subject.data.substring(0, localIndex - 1) - def visibility = new String(ryaStatement.columnVisibility) - return [ - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: subject.data, - dataType: URITYPE, - tripleValueType: TripleValueType.subject, - visibility: visibility), - ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: predicate.data, - dataType: URITYPE, - tripleValueType: TripleValueType.predicate, - visibility: visibility - ), ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: object.data, - dataType: object.dataType.stringValue(), - tripleValueType: TripleValueType.object, - visibility: visibility - ), ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: subjpred, - dataType: XMLSchema.STRING, - tripleValueType: TripleValueType.subjectpredicate, - visibility: visibility - ), ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: subjobj, - dataType: XMLSchema.STRING, - tripleValueType: TripleValueType.subjectobject, - visibility: visibility - ), ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: predobj, - dataType: XMLSchema.STRING, - tripleValueType: TripleValueType.predicateobject, - visibility: visibility - ), ONE), - new CustomEntry( - new IntermediateProspect(index: COUNT, - data: namespace, - dataType: URITYPE, - tripleValueType: TripleValueType.entity, - visibility: visibility - ), ONE), - ] - } - - @Override - Collection> combine(IntermediateProspect prospect, Iterable counts) { - - def iter = counts.iterator() - long sum = 0; - iter.each { lw -> - sum += lw.get() - } - - return [new CustomEntry(prospect, new LongWritable(sum))] - } - - @Override - void reduce(IntermediateProspect prospect, Iterable counts, Date timestamp, Reducer.Context context) { - def iter = counts.iterator() - long sum = 
0; - iter.each { lw -> - sum += lw.get() - } - - def indexType = prospect.tripleValueType.name() - - // not sure if this is the best idea.. - if ((sum >= 0) || - indexType.equals(TripleValueType.predicate.toString())) { - - Mutation m = new Mutation(indexType + DELIM + prospect.data + DELIM + ProspectorUtils.getReverseIndexDateTime(timestamp)) - m.put(COUNT, prospect.dataType, new ColumnVisibility(prospect.visibility), timestamp.getTime(), new Value("${sum}".getBytes())); - - context.write(null, m); - } - } - - @Override - String getIndexType() { - return COUNT - } - - @Override - String getCompositeValue(List indices){ - Iterator indexIt = indices.iterator(); - String compositeIndex = indexIt.next(); - while (indexIt.hasNext()){ - String value = indexIt.next(); - compositeIndex += DELIM + value; - } - return compositeIndex; - } - - @Override - List query(def connector, String tableName, List prospectTimes, String type, String compositeIndex, String dataType, String[] auths) { - - assert connector != null && tableName != null && type != null && compositeIndex != null - - def bs = connector.createBatchScanner(tableName, new Authorizations(auths), 4) - def ranges = [] - int max = 1000; //by default only return 1000 prospects maximum - if (prospectTimes != null) { - prospectTimes.each { prospect -> - ranges.add( - new Range(type + DELIM + compositeIndex + DELIM + ProspectorUtils.getReverseIndexDateTime(new Date(prospect)))) - } - } else { - max = 1; //only return the latest if no prospectTimes given - def prefix = type + DELIM + compositeIndex + DELIM; - ranges.add(new Range(prefix, prefix + RdfCloudTripleStoreConstants.LAST)) - } - bs.ranges = ranges - if (dataType != null) { - bs.fetchColumn(new Text(COUNT), new Text(dataType)) - } else { - bs.fetchColumnFamily(new Text(COUNT)) - } - - List indexEntries = new ArrayList() - def iter = bs.iterator() - - while (iter.hasNext() && indexEntries.size() <= max) { - def entry = iter.next() - def k = entry.key - def v = entry.value - - def rowArr = k.row.toString().split(DELIM) - String values = ""; - // if it is a composite index, then return the type as a composite index - if (type.equalsIgnoreCase(TripleValueType.subjectpredicate.toString()) || - type.equalsIgnoreCase(TripleValueType.subjectobject.toString()) || - type.equalsIgnoreCase(TripleValueType.predicateobject.toString())){ - values =rowArr[1] + DELIM + rowArr[2] - } - else values = rowArr[1] - - indexEntries.add(new IndexEntry(data: values, - tripleValueType: rowArr[0], - index: COUNT, - dataType: k.columnQualifier.toString(), - visibility: k.columnVisibility.toString(), - count: Long.parseLong(new String(v.get())), - timestamp: k.timestamp - )) - } - bs.close() - - return indexEntries - } - -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy deleted file mode 100644 index 6f3f7a638..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
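
To make the count plan's output concrete: each reduced count lands in one Accumulo row whose key is the triple-value type, the datum, and a reverse-indexed time, NUL-delimited, with the count in the cell value. A sketch of the mutation written for a predicate counted 42 times (the URI, visibility label, and reverse timestamp are made-up values):

```
import org.apache.accumulo.core.data.Mutation
import org.apache.accumulo.core.data.Value
import org.apache.accumulo.core.security.ColumnVisibility

def DELIM = "\u0000"
// row = tripleValueType DELIM data DELIM reverse-indexed time
def row = "predicate" + DELIM + "http://ex/knows" + DELIM + "979878796888499876"
Mutation m = new Mutation(row)
m.put("count",                                   // column family
      "http://www.w3.org/2001/XMLSchema#anyURI", // qualifier carries the data type
      new ColumnVisibility("U"),
      System.currentTimeMillis(),
      new Value("42".getBytes()))
```
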
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.plans.impl - -import mvm.rya.prospector.plans.IndexWorkPlan -import com.google.common.collect.Lists -import mvm.rya.prospector.plans.IndexWorkPlanManager - -/** - * Date: 12/3/12 - * Time: 11:24 AM - */ -class ServicesBackedIndexWorkPlanManager implements IndexWorkPlanManager { - - def Collection plans - - ServicesBackedIndexWorkPlanManager() { - def iterator = ServiceLoader.load(IndexWorkPlan.class).iterator(); - plans = Lists.newArrayList(iterator) - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy deleted file mode 100644 index bb8ceb4d5..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
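
Since the manager above discovers plans through java.util.ServiceLoader, an IndexWorkPlan only takes effect if it is named in a provider-configuration file on the classpath. The file path is fixed by the ServiceLoader contract; the single entry shown is the stock count plan:

```
# META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan
mvm.rya.prospector.plans.impl.CountPlan
```
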
- */ - -package mvm.rya.prospector.service - -import mvm.rya.prospector.utils.ProspectorUtils -import org.apache.accumulo.core.data.Key -import org.apache.accumulo.core.data.Range -import org.apache.accumulo.core.security.Authorizations -import org.apache.hadoop.io.Text - -import static mvm.rya.prospector.utils.ProspectorConstants.METADATA -import static mvm.rya.prospector.utils.ProspectorConstants.PROSPECT_TIME -import mvm.rya.prospector.plans.IndexWorkPlanManager -import mvm.rya.prospector.plans.impl.ServicesBackedIndexWorkPlanManager -import mvm.rya.prospector.plans.IndexWorkPlan -import mvm.rya.prospector.domain.IndexEntry - -/** - * Date: 12/5/12 - * Time: 12:28 PM - */ -class ProspectorService { - - def connector - String tableName - - IndexWorkPlanManager manager = new ServicesBackedIndexWorkPlanManager() - Map plans - - ProspectorService(def connector, String tableName) { - this.connector = connector - this.tableName = tableName - this.plans = ProspectorUtils.planMap(manager.plans) - - //init - def tos = connector.tableOperations() - if(!tos.exists(tableName)) { - tos.create(tableName) - } - } - - public Iterator getProspects(String[] auths) { - - def scanner = connector.createScanner(tableName, new Authorizations(auths)) - scanner.setRange(Range.exact(METADATA)); - scanner.fetchColumnFamily(new Text(PROSPECT_TIME)); - - def iterator = scanner.iterator(); - - return new Iterator() { - - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public Long next() { - return iterator.next().getKey().getTimestamp(); - } - - @Override - public void remove() { - iterator.remove(); - } - }; - - } - - public Iterator getProspectsInRange(long beginTime, long endTime, String[] auths) { - - def scanner = connector.createScanner(tableName, new Authorizations(auths)) - scanner.setRange(new Range( - new Key(METADATA, PROSPECT_TIME, ProspectorUtils.getReverseIndexDateTime(new Date(endTime)), "", Long.MAX_VALUE), - new Key(METADATA, PROSPECT_TIME, ProspectorUtils.getReverseIndexDateTime(new Date(beginTime)), "", 0l) - )) - def iterator = scanner.iterator(); - - return new Iterator() { - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public Long next() { - return iterator.next().getKey().getTimestamp(); - } - - @Override - public void remove() { - iterator.remove(); - } - }; - - } - - public List query(List prospectTimes, String indexType, String type, List index, String dataType, String[] auths) { - assert indexType != null - - def plan = plans.get(indexType) - assert plan != null: "Index Type: ${indexType} does not exist" - String compositeIndex = plan.getCompositeValue(index); - - return plan.query(connector, tableName, prospectTimes, type, compositeIndex, dataType, auths) - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy deleted file mode 100644 index 3e8aba1da..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
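
As a usage sketch for the service above: the call below asks for the most recent count recorded for one predicate (a null prospect-time list selects only the latest entry). The instance name, user, table name, and predicate URI are illustrative, and a MockInstance stands in for a real cluster:

```
import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken

def connector = new MockInstance("dev").getConnector("root", new PasswordToken(""))
def service = new ProspectorService(connector, "rya_prospects")
String[] auths = ["U"]
def entries = service.query(null, "count", "predicate",
        ["http://ex/knows"], null, auths)
entries.each { println "${it.data} -> ${it.count}" }
```
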
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.service - -import mvm.rya.api.RdfCloudTripleStoreConfiguration -import mvm.rya.api.persist.RdfEvalStatsDAO -import mvm.rya.prospector.domain.TripleValueType -import mvm.rya.prospector.utils.ProspectorConstants -import org.apache.hadoop.conf.Configuration -import org.openrdf.model.Resource -import org.openrdf.model.Value - -import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF - -/** - * An ${@link mvm.rya.api.persist.RdfEvalStatsDAO} that uses the Prospector Service underneath return counts. - */ -class ProspectorServiceEvalStatsDAO implements RdfEvalStatsDAO { - - def ProspectorService prospectorService - - ProspectorServiceEvalStatsDAO() { - } - - ProspectorServiceEvalStatsDAO(ProspectorService prospectorService, RdfCloudTripleStoreConfiguration conf) { - this.prospectorService = prospectorService - } - - public ProspectorServiceEvalStatsDAO(def connector, RdfCloudTripleStoreConfiguration conf) { - this.prospectorService = new ProspectorService(connector, getProspectTableName(conf)) - } - - @Override - void init() { - assert prospectorService != null - } - - @Override - boolean isInitialized() { - return prospectorService != null - } - - @Override - void destroy() { - - } - - @Override - public double getCardinality(RdfCloudTripleStoreConfiguration conf, CARDINALITY_OF card, List val) { - - assert conf != null && card != null && val != null - String triplePart = null; - switch (card) { - case (CARDINALITY_OF.SUBJECT): - triplePart = TripleValueType.subject - break; - case (CARDINALITY_OF.PREDICATE): - triplePart = TripleValueType.predicate - break; - case (CARDINALITY_OF.OBJECT): - triplePart = TripleValueType.object - break; - case (CARDINALITY_OF.SUBJECTPREDICATE): - triplePart = TripleValueType.subjectpredicate - break; - case (CARDINALITY_OF.SUBJECTOBJECT): - triplePart = TripleValueType.subjectobject - break; - case (CARDINALITY_OF.PREDICATEOBJECT): - triplePart = TripleValueType.predicateobject - break; - } - - String[] auths = conf.getAuths() - List indexedValues = new ArrayList(); - Iterator valueIt = val.iterator(); - while (valueIt.hasNext()){ - indexedValues.add(valueIt.next().stringValue()); - } - - def indexEntries = prospectorService.query(null, ProspectorConstants.COUNT, triplePart, indexedValues, null /** what is the datatype here? */, - auths) - - return indexEntries.size() > 0 ? 
indexEntries.head().count : -1 - } - - @Override - double getCardinality(RdfCloudTripleStoreConfiguration conf, CARDINALITY_OF card, List val, Resource context) { - return getCardinality(conf, card, val) //TODO: Not sure about the context yet - } - - @Override - public void setConf(RdfCloudTripleStoreConfiguration conf) { - - } - - @Override - RdfCloudTripleStoreConfiguration getConf() { - return null - } - - public static String getProspectTableName(RdfCloudTripleStoreConfiguration conf) { - return conf.getTablePrefix() + "prospects"; - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy deleted file mode 100644 index c550b92fd..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.utils - -/** - * Date: 12/3/12 - * Time: 12:33 PM - */ -class CustomEntry implements Map.Entry { - - K key; - V value; - - CustomEntry(K key, V value) { - this.key = key - this.value = value - } - - K getKey() { - return key - } - - void setKey(K key) { - this.key = key - } - - V getValue() { - return value - } - - V setValue(V value) { - this.value = value - this.value - } -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy deleted file mode 100644 index 197e7357d..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
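
To feed those counts into query planning, the DAO above stands in wherever an RdfEvalStatsDAO is expected. A minimal wiring sketch (mock connector, illustrative table prefix and URI):

```
import mvm.rya.accumulo.AccumuloRdfConfiguration
import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF
import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.openrdf.model.impl.ValueFactoryImpl

def connector = new MockInstance("dev").getConnector("root", new PasswordToken(""))
def conf = new AccumuloRdfConfiguration()
conf.setTablePrefix("rya_")              // prospects table resolves to "rya_prospects"
def dao = new ProspectorServiceEvalStatsDAO(connector, conf)
dao.init()
def vf = ValueFactoryImpl.getInstance()
double card = dao.getCardinality(conf, CARDINALITY_OF.PREDICATE,
        [vf.createURI("http://ex/knows")])
// card is -1 when no prospect has been recorded, otherwise the stored count
```
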
- */ - -package mvm.rya.prospector.utils - -/** - * Date: 12/5/12 - * Time: 10:57 AM - */ -class ProspectorConstants { - public static final String COUNT = "count" - public static final String METADATA = "metadata" - public static final String PROSPECT_TIME = "prospectTime" - public static final String DEFAULT_VIS = "U&FOUO" - public static final byte[] EMPTY = new byte [0]; - - //config properties - public static final String PERFORMANT = "performant" - - public static final String USERNAME = "username" - public static final String PASSWORD = "password" - public static final String INSTANCE = "instance" - public static final String ZOOKEEPERS = "zookeepers" - public static final String MOCK = "mock" -} diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy deleted file mode 100644 index 640f17e55..000000000 --- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package mvm.rya.prospector.utils - -import org.apache.accumulo.core.client.Connector -import org.apache.accumulo.core.client.Instance -import org.apache.accumulo.core.client.ZooKeeperInstance -import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat -import org.apache.accumulo.core.client.mock.MockInstance -import org.apache.accumulo.core.data.Mutation -import org.apache.accumulo.core.security.Authorizations -import org.apache.commons.lang.Validate -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.io.Text -import org.apache.hadoop.io.compress.GzipCodec -import org.apache.hadoop.mapreduce.Job - -import java.text.SimpleDateFormat -import mvm.rya.prospector.plans.IndexWorkPlan -import org.apache.accumulo.core.client.security.tokens.PasswordToken - -import static mvm.rya.prospector.utils.ProspectorConstants.* - -/** - * Date: 12/4/12 - * Time: 4:24 PM - */ -class ProspectorUtils { - - public static final long INDEXED_DATE_SORT_VAL = 999999999999999999L; // 18 char long, same length as date format pattern below - public static final String INDEXED_DATE_FORMAT = "yyyyMMddHHmmsssSSS"; - - public static String getReverseIndexDateTime(Date date) { - Validate.notNull(date); - String formattedDateString = new SimpleDateFormat(INDEXED_DATE_FORMAT).format(date); - long diff = INDEXED_DATE_SORT_VAL - Long.valueOf(formattedDateString); - - return Long.toString(diff); - } - - public static Map planMap(def plans) { - plans.inject([:]) { map, plan -> - map.putAt(plan.indexType, plan) - map - } - } - - public static void initMRJob(Job job, String table, String outtable, String[] auths) { - Configuration conf = job.configuration - String username = conf.get(USERNAME) - String password = conf.get(PASSWORD) - String instance = conf.get(INSTANCE) - String zookeepers = conf.get(ZOOKEEPERS) - String mock = conf.get(MOCK) - - //input - if (Boolean.parseBoolean(mock)) { - AccumuloInputFormat.setMockInstance(job, instance) - AccumuloOutputFormat.setMockInstance(job, instance) - } else if (zookeepers != null) { - AccumuloInputFormat.setZooKeeperInstance(job, instance, zookeepers) - AccumuloOutputFormat.setZooKeeperInstance(job, instance, zookeepers) - } else { - throw new IllegalArgumentException("Must specify either mock or zookeepers"); - } - - AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes())) - AccumuloInputFormat.setInputTableName(job, table) - job.setInputFormatClass(AccumuloInputFormat.class); - AccumuloInputFormat.setScanAuthorizations(job, new Authorizations(auths)) - - // OUTPUT - job.setOutputFormatClass(AccumuloOutputFormat.class); - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(Mutation.class); - AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes())) - AccumuloOutputFormat.setDefaultTableName(job, outtable) - } - - public static void addMRPerformance(Configuration conf) { - conf.setBoolean("mapred.map.tasks.speculative.execution", false); - conf.setBoolean("mapred.reduce.tasks.speculative.execution", false); - conf.set("io.sort.mb", "256"); - conf.setBoolean("mapred.compress.map.output", true); - conf.set("mapred.map.output.compression.codec", GzipCodec.class.getName()); - } - - public static Instance instance(Configuration conf) { - assert conf != null - - String instance_str = conf.get(INSTANCE) - String zookeepers = conf.get(ZOOKEEPERS) - String mock = conf.get(MOCK) - if (Boolean.parseBoolean(mock)) 
{ - return new MockInstance(instance_str) - } else if (zookeepers != null) { - return new ZooKeeperInstance(instance_str, zookeepers) - } else { - throw new IllegalArgumentException("Must specify either mock or zookeepers"); - } - } - - public static Connector connector(Instance instance, Configuration conf) { - String username = conf.get(USERNAME) - String password = conf.get(PASSWORD) - if (instance == null) - instance = instance(conf) - return instance.getConnector(username, password) - } - - public static void writeMutations(Connector connector, String tableName, def mutations) { - def bw = connector.createBatchWriter(tableName, 10000l, 10000l, 4); - mutations.each { m -> - bw.addMutation(m) - } - bw.flush() - bw.close() - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java deleted file mode 100644 index 0ed8026a8..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java +++ /dev/null @@ -1,640 +0,0 @@ -package mvm.rya.joinselect; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
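
The reverse date index above works by subtracting an 18-digit timestamp string from a constant of the same width, so later dates yield smaller numbers and an ordinary ascending Accumulo scan returns the newest prospect run first. A tiny illustration:

```
import java.text.SimpleDateFormat

def fmt = new SimpleDateFormat("yyyyMMddHHmmsssSSS")   // the 18-char pattern above
String stamp = fmt.format(new Date())
long reverse = 999999999999999999L - Long.valueOf(stamp)
println Long.toString(reverse)   // sorts before the value for any earlier date
```
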
- */ - - - -import static com.google.common.base.Preconditions.checkNotNull; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfUtils; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.layout.TableLayoutStrategy; -import mvm.rya.api.persist.RdfDAOException; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.query.algebra.QueryModelNode; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.impl.ExternalSet; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - - - - - -public class AccumuloSelectivityEvalDAO implements SelectivityEvalDAO { - - private boolean initialized = false; - private RdfCloudTripleStoreConfiguration conf; - private Connector connector; - private TableLayoutStrategy tableLayoutStrategy; - private boolean filtered = false; - private boolean denormalized = false; - private int FullTableCardinality = 0; - private static final String DELIM = "\u0000"; - private Map joinMap = new HashMap();; - private RdfEvalStatsDAO resd; - - @Override - public void init() throws RdfDAOException { - try { - if (isInitialized()) { - throw new IllegalStateException("Already initialized"); - } - if (!resd.isInitialized()) { - resd.init(); - } - checkNotNull(connector); - tableLayoutStrategy = conf.getTableLayoutStrategy(); - TableOperations tos = connector.tableOperations(); - AccumuloRdfUtils.createTableIfNotExist(tos, tableLayoutStrategy.getSelectivity()); - AccumuloRdfUtils.createTableIfNotExist(tos, tableLayoutStrategy.getProspects()); - initialized = true; - } catch (Exception e) { - throw new RdfDAOException(e); - } - } - - - public AccumuloSelectivityEvalDAO() { - - } - - - public AccumuloSelectivityEvalDAO(RdfCloudTripleStoreConfiguration conf, Connector connector) { - - this.conf = conf; - this.connector = connector; - } - - public AccumuloSelectivityEvalDAO(RdfCloudTripleStoreConfiguration conf) { - - this.conf = conf; - Instance inst = new ZooKeeperInstance(conf.get("sc.cloudbase.instancename"), conf.get("sc.cloudbase.zookeepers")); - try { - this.connector = inst.getConnector(conf.get("sc.cloudbase.username"), conf.get("sc.cloudbase.password")); - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } - } - - @Override - public void destroy() throws RdfDAOException { - if (!isInitialized()) { - throw new 
IllegalStateException("Not initialized"); - } - initialized = false; - } - - @Override - public boolean isInitialized() throws RdfDAOException { - return initialized; - } - - public Connector getConnector() { - return connector; - } - - public void setConnector(Connector connector) { - this.connector = connector; - } - - @Override - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } - - @Override - public void setConf(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public RdfEvalStatsDAO getRdfEvalDAO() { - return resd; - } - - public void setRdfEvalDAO(RdfEvalStatsDAO resd) { - this.resd = resd; - } - - public void setFiltered(boolean filtered) { - this.filtered = filtered; - } - - - public void setDenormalized(boolean denormalize) { - this.denormalized = denormalize; - } - - private double getJoinSelect(RdfCloudTripleStoreConfiguration conf, StatementPattern sp1, StatementPattern sp2) throws TableNotFoundException { - - if (FullTableCardinality == 0) { - this.getTableSize(conf); - } - - Authorizations authorizations = getAuths(conf); - String row1 = CardinalityCalcUtil.getRow(sp1, true); - String row2 = CardinalityCalcUtil.getRow(sp2, true); - List joinType = CardinalityCalcUtil.getJoins(sp1, sp2); - - if (joinType.size() == 0) { - return 1; - } - - if (joinType.size() == 2) { - - String cacheRow1; - String cacheRow2; - long card1 = 0; - long card2 = 0; - boolean contCard1 = false; - boolean contCard2 = false; - - cacheRow1 = row1 + DELIM + joinType.get(0); - cacheRow2 = row2 + DELIM + joinType.get(1); - - long count1 = getCardinality(conf, sp1); - long count2 = getCardinality(conf, sp2); - - if (count1 == 0 || count2 == 0) { - return 0; - } - - if (joinMap.containsKey(cacheRow1)) { - card1 = joinMap.get(cacheRow1); - contCard1 = true; - } - if (joinMap.containsKey(cacheRow2)) { - card2 = joinMap.get(cacheRow2); - contCard2 = true; - } - - if (!contCard1) { - Scanner joinScanner = connector.createScanner(tableLayoutStrategy.getSelectivity(), authorizations); - joinScanner.setRange(Range.prefix(row1)); - - for (Map.Entry entry : joinScanner) { - if (entry.getKey().getColumnFamily().toString().equals(joinType.get(0))) { - card1 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow1, card1); - // System.out.println("Card1 is " + card1); - break; - } - } - } - - if (!contCard2) { - Scanner joinScanner = connector.createScanner(tableLayoutStrategy.getSelectivity(), authorizations); - joinScanner.setRange(Range.prefix(row2)); - for (Map.Entry entry : joinScanner) { - if (entry.getKey().getColumnFamily().toString().equals(joinType.get(1))) { - card2 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow2, card2); - // System.out.println("Card2 is " + card2); - break; - } - } - - } - - if (!filtered && !denormalized) { - double temp1 = Math.min(((double) card1) / ((double) count1 * FullTableCardinality), ((double) card2) / ((double) count2 * FullTableCardinality)); - - double temp2 = Math.max((double) count1 / FullTableCardinality, (double) count2 / FullTableCardinality); - - // TODO maybe change back to original form as temp2 will rarely be less than temp1. 
- return Math.min(temp1, temp2); - } else if(denormalized) { - return Math.min(card1,card2); - } else { - - return Math.min(((double) card1 * count2) / ((double) count1 * FullTableCardinality * FullTableCardinality), ((double) card2 * count1) - / ((double) count2 * FullTableCardinality * FullTableCardinality)); - - } - } else { - - String cacheRow1 = row1 + DELIM + joinType.get(0); - String cacheRow2 = row1 + DELIM + joinType.get(1); - String cacheRow3 = row2 + DELIM + joinType.get(2); - String cacheRow4 = row2 + DELIM + joinType.get(3); - long card1 = 0; - long card2 = 0; - long card3 = 0; - long card4 = 0; - boolean contCard1 = false; - boolean contCard2 = false; - - long count1 = getCardinality(conf, sp1); - long count2 = getCardinality(conf, sp2); - - if (count1 == 0 || count2 == 0) { - return 0; - } - - if (joinMap.containsKey(cacheRow1) && joinMap.containsKey(cacheRow2)) { - card1 = joinMap.get(cacheRow1); - card2 = joinMap.get(cacheRow2); - contCard1 = true; - } - if (joinMap.containsKey(cacheRow3) && joinMap.containsKey(cacheRow4)) { - card3 = joinMap.get(cacheRow3); - card4 = joinMap.get(cacheRow4); - contCard2 = true; - } - - if (!contCard1) { - Scanner joinScanner = connector.createScanner(tableLayoutStrategy.getSelectivity(), authorizations); - joinScanner.setRange(Range.prefix(row1)); - boolean found1 = false; - boolean found2 = false; - - for (Map.Entry entry : joinScanner) { - - if (entry.getKey().getColumnFamily().toString().equals(joinType.get(0))) { - card1 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow1, card1); - found1 = true; - // System.out.println("Card1 is " + card1); - if (found1 && found2) { - card1 = Math.min(card1, card2); - break; - } - } else if (entry.getKey().getColumnFamily().toString().equals(joinType.get(1))) { - card2 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow2, card2); - found2 = true; - // System.out.println("Card1 is " + card1); - if (found1 && found2) { - card1 = Math.min(card1, card2); - break; - } - } - } - } - - if (!contCard2) { - Scanner joinScanner = connector.createScanner(tableLayoutStrategy.getSelectivity(), authorizations); - joinScanner.setRange(Range.prefix(row2)); - boolean found1 = false; - boolean found2 = false; - for (Map.Entry entry : joinScanner) { - if (entry.getKey().getColumnFamily().toString().equals(joinType.get(2))) { - card3 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow3, card3); - found1 = true; - // System.out.println("Card2 is " + card2); - if (found1 && found2) { - card3 = Math.min(card3, card4); - break; - } - } else if (entry.getKey().getColumnFamily().toString().equals(joinType.get(3))) { - card4 = CardinalityCalcUtil.getJCard(entry.getKey()); - joinMap.put(cacheRow4, card4); - found2 = true; - // System.out.println("Card1 is " + card1); - if (found1 && found2) { - card3 = Math.min(card3, card4); - break; - } - } - } - - } - - if (!filtered && !denormalized) { - return Math.min(((double) card1) / ((double) count1 * FullTableCardinality), ((double) card3) / ((double) count2 * FullTableCardinality)); - } else if(denormalized) { - return Math.min(card1,card3); - } else { - return Math.min(((double) card1 * count2) / ((double) count1 * FullTableCardinality * FullTableCardinality), ((double) card3 * count1) - / ((double) count2 * FullTableCardinality * FullTableCardinality)); - - } - - } - - } - - // TODO currently computes average selectivity of sp1 with each node in TupleExpr te (is this best?) 
- private double getSpJoinSelect(RdfCloudTripleStoreConfiguration conf, TupleExpr te, StatementPattern sp1) - throws TableNotFoundException { - - // System.out.println("Tuple is " + te + " and sp is " + sp1); - - if (te instanceof StatementPattern) { - return getJoinSelect(conf, (StatementPattern) te, sp1); - } else { - - SpExternalCollector spe = new SpExternalCollector(); - te.visit(spe); - List espList = spe.getSpExtTup(); - - if (espList.size() == 0) { - - Set tupBn = te.getAssuredBindingNames(); - Set eBn = sp1.getAssuredBindingNames(); - Set intersect = Sets.intersection(tupBn, eBn); - - return Math.pow(1.0 / 10000.0, intersect.size()); - - } - - double min = Double.MAX_VALUE; - double select = Double.MAX_VALUE; - - for (QueryModelNode node : espList) { - - if (node instanceof StatementPattern) - select = getJoinSelect(conf, sp1, (StatementPattern) node); - else if (node instanceof ExternalSet) { - select = getExtJoinSelect(sp1, (ExternalSet) node); - } - - if (min > select) { - min = select; - } - } - // System.out.println("Max is " + max); - return min; - } - } - - public double getJoinSelect(RdfCloudTripleStoreConfiguration conf, TupleExpr te1, TupleExpr te2) throws TableNotFoundException { - - SpExternalCollector spe = new SpExternalCollector(); - te2.visit(spe); - List espList = spe.getSpExtTup(); - - double min = Double.MAX_VALUE; - - for (QueryModelNode node : espList) { - double select = getSelectivity(conf, te1, node); - if (min > select) { - min = select; - } - } - - return min; - } - - - - - private double getSelectivity(RdfCloudTripleStoreConfiguration conf, TupleExpr te, QueryModelNode node) throws TableNotFoundException { - - if ((node instanceof StatementPattern)) { - return getSpJoinSelect(conf, te, (StatementPattern) node); - - } else if (node instanceof ExternalSet) { - - return getExtJoinSelect(te, (ExternalSet) node); - - } else { - return 0; - } - - } - - - - - - private double getExtJoinSelect(TupleExpr te, ExternalSet eSet) { - - Set tupBn = te.getAssuredBindingNames(); - Set eBn = eSet.getAssuredBindingNames(); - Set intersect = Sets.intersection(tupBn, eBn); - - return Math.pow(1.0 / 10000.0, intersect.size()); - - } - - - - - - - - - - - - // obtains cardinality for StatementPattern. Returns cardinality of 0 - // if no instances of constants occur in table. - // assumes composite cardinalities will be used. - @Override - public long getCardinality(RdfCloudTripleStoreConfiguration conf, StatementPattern sp) throws TableNotFoundException { - - Var subjectVar = sp.getSubjectVar(); - Resource subj = (Resource) getConstantValue(subjectVar); - Var predicateVar = sp.getPredicateVar(); - URI pred = (URI) getConstantValue(predicateVar); - Var objectVar = sp.getObjectVar(); - org.openrdf.model.Value obj = getConstantValue(objectVar); - Resource context = (Resource) getConstantValue(sp.getContextVar()); - - /** - * We put full triple scans before rdf:type because more often than not the triple scan is being joined with something else that is better than asking the - * full rdf:type of everything. 
- */ - double cardinality = 0; - try { - cardinality = 2*getTableSize(conf); - } catch (Exception e1) { - e1.printStackTrace(); - } - try { - if (subj != null) { - List values = new ArrayList(); - CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT; - values.add(subj); - - if (pred != null) { - values.add(pred); - card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTPREDICATE; - } else if (obj != null) { - values.add(obj); - card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTOBJECT; - } - - double evalCard = this.getCardinality(conf, card, values, context); - // the cardinality will be -1 if there was no value found (if - // the index does not exist) - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - // TODO change this to agree with prospector - cardinality = 0; - } - } else if (pred != null) { - List values = new ArrayList(); - CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE; - values.add(pred); - - if (obj != null) { - values.add(obj); - card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT; - } - - double evalCard = this.getCardinality(conf, card, values, context); - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - // TODO change this to agree with prospector - cardinality = 0; - } - } else if (obj != null) { - List values = new ArrayList(); - values.add(obj); - double evalCard = this.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values, context); - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - // TODO change this to agree with prospector - cardinality = 0; - } - } else { - cardinality = getTableSize(conf); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - // TODO is this okay? - return (long) cardinality; - } - - private org.openrdf.model.Value getConstantValue(Var var) { - if (var != null) - return var.getValue(); - else - return null; - } - - public double getCardinality(RdfCloudTripleStoreConfiguration conf, CARDINALITY_OF card, List val) throws RdfDAOException { - return resd.getCardinality(conf, card, val); - } - - public double getCardinality(RdfCloudTripleStoreConfiguration conf, CARDINALITY_OF card, List val, Resource context) throws RdfDAOException { - - return resd.getCardinality(conf, card, val, context); - - } - - public int getTableSize(RdfCloudTripleStoreConfiguration conf) throws TableNotFoundException { - - Authorizations authorizations = getAuths(conf); - - - if (joinMap.containsKey("subjectpredicateobject" + DELIM + "FullTableCardinality")) { - FullTableCardinality = joinMap.get("subjectpredicateobject" + DELIM + "FullTableCardinality").intValue(); - return FullTableCardinality; - } - - if (FullTableCardinality == 0) { - Scanner joinScanner = connector.createScanner(tableLayoutStrategy.getSelectivity(), authorizations); - joinScanner.setRange(Range.prefix(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"))); - Iterator> iterator = joinScanner.iterator(); - if (iterator.hasNext()) { - Map.Entry entry = iterator.next(); - if (entry.getKey().getColumnFamily().toString().equals("FullTableCardinality")) { - String Count = entry.getKey().getColumnQualifier().toString(); - FullTableCardinality = Integer.parseInt(Count); - } - } - if (FullTableCardinality == 0) { - throw new RuntimeException("Table does not contain full cardinality"); - } - - } - - return FullTableCardinality; - - } - - - private Authorizations getAuths(RdfCloudTripleStoreConfiguration conf) { - String[] auths = conf.getAuths(); - Authorizations 
authorizations = null; - if (auths == null || auths.length == 0) { - authorizations = new Authorizations(); - } else { - authorizations = new Authorizations(auths); - } - - return authorizations; - } - - - - private static class SpExternalCollector extends QueryModelVisitorBase { - - private List eSet = Lists.newArrayList(); - - - @Override - public void meetNode(QueryModelNode node) throws RuntimeException { - if (node instanceof ExternalSet || node instanceof StatementPattern) { - eSet.add(node); - } - super.meetNode(node); - } - - public List getSpExtTup() { - return eSet; - } - - } - - - - - - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java deleted file mode 100644 index a54a5af86..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java +++ /dev/null @@ -1,267 +0,0 @@ -package mvm.rya.joinselect; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
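Review note: a condensed restatement of the estimate the deleted getCardinality computes, with hypothetical names; the real method picks the composite index (SUBJECT, SUBJECTPREDICATE, and so on) from whichever terms of the pattern are constant:

    // Start from a pessimistic upper bound of twice the table size, then
    // take the minimum with the index cardinality when one exists; a missing
    // index entry (signalled by -1) currently collapses the estimate to 0,
    // which the inline TODO flags as inconsistent with the prospector.
    static long estimateCardinality(long tableSize, double indexCardinality) {
        double estimate = 2.0 * tableSize;
        if (indexCardinality >= 0) {
            estimate = Math.min(estimate, indexCardinality);
        } else {
            estimate = 0;
        }
        return (long) estimate;
    }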
- */ - - - -import java.util.ArrayList; -import java.util.List; - -import org.apache.accumulo.core.data.Key; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -public class CardinalityCalcUtil { - - private static final String DELIM = "\u0000"; - - private static String intToTriplePlace(int i) { - - int place = i; - - switch (place) { - - case 0: - return "subject"; - - case 1: - return "predicate"; - - case 2: - return "object"; - - default: - throw new IllegalArgumentException("Invalid integer triple place."); - - } - - } - - private static int triplePlaceToInt(String s) { - - if (s.equals("subject")) { - return 0; - } else if (s.equals("predicate")) { - return 1; - } else if (s.equals("object")) { - return 2; - } else - throw new IllegalArgumentException("Invalid triple place."); - - } - - private static List getVariablePos(StatementPattern sp) { - - List posList = new ArrayList(); - List varList = sp.getVarList(); - - for (int i = 0; i < 3; i++) { - if (!varList.get(i).isConstant()) { - posList.add(intToTriplePlace(i)); - - } - } - - return posList; - - } - - private static List getConstantPos(StatementPattern sp) { - - List posList = new ArrayList(); - List varList = sp.getVarList(); - - for (int i = 0; i < 3; i++) { - if (varList.get(i).isConstant()) { - posList.add(intToTriplePlace(i)); - - } - } - - return posList; - - } - - // assumes sp contains at most two constants - // TODO might not be good if all variable sp is needed to get table size - public static String getRow(StatementPattern sp, boolean joinTable) { - - String row = ""; - String values = ""; - List varList = sp.getVarList(); - List constList = CardinalityCalcUtil.getConstantPos(sp); - int i; - - for (String s : constList) { - - i = CardinalityCalcUtil.triplePlaceToInt(s); - - if (row.equals("subject") && s.equals("object") && joinTable) { - row = s + row; - if (values.length() == 0) { - values = values + removeQuotes(varList.get(i).getValue().toString()); - } else { - values = removeQuotes(varList.get(i).getValue().toString()) + DELIM + values; - } - } else { - row = row + s; - if (values.length() == 0) { - values = values + removeQuotes(varList.get(i).getValue().toString()); - } else { - values = values + DELIM + removeQuotes(varList.get(i).getValue().toString()); - } - } - - } - - return (row + DELIM + values); - - } - - - - - private static String removeQuotes(String s) { - String trim = s.trim(); - if (trim.substring(0, 1).equals("\"")) { - trim = trim.substring(1, trim.length() - 1); - } - return trim; - } - - - - - - public static long getJCard(Key key) { - - String s = key.getColumnQualifier().toString(); - return Long.parseLong(s); - - } - - //determines a list of the positions in which two SPs have a common variable - private static List getJoinType(StatementPattern sp1, StatementPattern sp2) { - - List joinList = new ArrayList(); - List spList1 = sp1.getVarList(); - List spList2 = sp2.getVarList(); - - List pos1 = CardinalityCalcUtil.getVariablePos(sp1); - List pos2 = CardinalityCalcUtil.getVariablePos(sp2); - - int i, j; - - for (String s : pos1) { - for (String t : pos2) { - i = CardinalityCalcUtil.triplePlaceToInt(s); - j = CardinalityCalcUtil.triplePlaceToInt(t); - - if (spList1.get(i).getName().equals(spList2.get(j).getName())) { - joinList.add(s); - joinList.add(t); - - } - - } - } - if (joinList.size() == 4) { - return orderJoinType(joinList); - } - - return joinList; - - } - - // assumes list size is four - private static List orderJoinType(List jList) { - - 
List tempList = new ArrayList(); - - if (jList.get(0).equals("subject") && jList.get(2).equals("object")) { - tempList.add(jList.get(2)); - tempList.add(jList.get(0)); - tempList.add(jList.get(3)); - tempList.add(jList.get(1)); - return tempList; - } else { - tempList.add(jList.get(0)); - tempList.add(jList.get(2)); - tempList.add(jList.get(1)); - tempList.add(jList.get(3)); - return tempList; - } - - } - - // assumes size is four - private static List reverseJoinType(List jList) { - - List tempList = new ArrayList(); - - if (jList.get(2).equals("subject") && jList.get(3).equals("object")) { - tempList.add(jList.get(3)); - tempList.add(jList.get(2)); - tempList.add(jList.get(1)); - tempList.add(jList.get(0)); - return tempList; - } else if (jList.get(2).equals("predicate") && jList.get(3).equals("subject")) { - tempList.add(jList.get(3)); - tempList.add(jList.get(2)); - tempList.add(jList.get(1)); - tempList.add(jList.get(0)); - return tempList; - } else if (jList.get(2).equals("object") && jList.get(3).equals("predicate")) { - tempList.add(jList.get(3)); - tempList.add(jList.get(2)); - tempList.add(jList.get(1)); - tempList.add(jList.get(0)); - return tempList; - } else { - tempList.add(jList.get(2)); - tempList.add(jList.get(3)); - tempList.add(jList.get(0)); - tempList.add(jList.get(1)); - return tempList; - } - } - - public static List getJoins(StatementPattern sp1, StatementPattern sp2) { - List jList = new ArrayList(); - List list = getJoinType(sp1, sp2); - if (list.size() == 0) { - return list; - } else if (list.size() == 2) { - jList.add(list.get(0) + list.get(1)); - jList.add(list.get(1) + list.get(0)); - return jList; - } else { - - list = orderJoinType(list); - jList.add(list.get(0) + list.get(1) + list.get(2) + list.get(3)); - jList.add(list.get(0) + list.get(1) + list.get(3) + list.get(2)); - list = reverseJoinType(list); - jList.add(list.get(0) + list.get(1) + list.get(2) + list.get(3)); - jList.add(list.get(0) + list.get(1) + list.get(3) + list.get(2)); - return jList; - } - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java deleted file mode 100644 index 5d3d643bb..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java +++ /dev/null @@ -1,129 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
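Review note: the row-key convention encoded by the deleted CardinalityCalcUtil.getRow is worth stating on its own: the names of the constant positions are concatenated, then the constant values follow, all NUL-delimited. A sketch under that reading (hypothetical helper; it ignores the object-before-subject reordering the join-table case applies):

    import java.util.List;

    final class SelectivityRowKeys {
        private static final String DELIM = "\u0000";

        // e.g. constantPositions ["subject","predicate"] -> row "subjectpredicate"
        static String rowKey(List<String> constantPositions, List<String> constantValues) {
            String row = String.join("", constantPositions);
            String values = String.join(DELIM, constantValues);
            return row + DELIM + values;
        }
    }

For example rowKey(List.of("subject", "predicate"), List.of("urn:s", "urn:p")) yields subjectpredicate\0urn:s\0urn:p, the shape of key the selectivity DAO scans for.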
- */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SELECTIVITY_TABLE; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_TABLE; - -import java.io.IOException; - -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; - -public class FullTableSize extends Configured implements Tool { - - private static final String DELIM = "\u0000"; - - - - - - public static void main(String[] args) throws Exception { - ToolRunner.run(new FullTableSize(), args); - } - - - - - - - public static class FullTableMapper extends Mapper { - private static final IntWritable ONE = new IntWritable(1); - - - @Override - public void map(Key key, Value value, Context context) throws IOException, InterruptedException { - context.write(new Text("COUNT"), ONE); - } - } - - public static class FullTableReducer extends Reducer { - - @Override - public void reduce(Text key, Iterable values, Context context) throws IOException, InterruptedException { - int count = 0; - - for (IntWritable i : values) { - count += i.get(); - } - - String countStr = Integer.toString(count); - - Mutation m = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m.put(new Text("FullTableCardinality"), new Text(countStr), new Value(new byte[0])); - - context.write(new Text(""), m); - } - } - - public static class FullTableCombiner extends Reducer { - - @Override - public void reduce(Text key, Iterable values, Context context) throws IOException, InterruptedException { - - int count = 0; - - for (IntWritable i : values) { - count += i.get(); - } - - context.write(key, new IntWritable(count)); - } - } - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - String inTable = conf.get(SPO_TABLE); - String outTable = conf.get(SELECTIVITY_TABLE); - String auths = conf.get(AUTHS); - - assert inTable != null && outTable != null; - - Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - JoinSelectStatsUtil.initTableMRJob(job, inTable, outTable, auths); - job.setMapperClass(FullTableMapper.class); - job.setCombinerClass(FullTableCombiner.class); - job.setReducerClass(FullTableReducer.class); - job.setNumReduceTasks(1); - - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java deleted file mode 100644 index bb227f33f..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java +++ /dev/null @@ -1,272 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_OUTPUTPATH; - -import java.io.IOException; - -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.CardinalityType; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; -import mvm.rya.joinselect.mr.utils.TripleCard; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.WritableComparable; -import org.apache.hadoop.io.WritableComparator; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Partitioner; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.Tool; - -public class JoinSelectAggregate extends Configured implements Tool { - - public static class JoinSelectAggregateMapper extends Mapper { - - public void map(CompositeType key, TripleCard value, Context context) throws IOException, InterruptedException { - - context.write(key, value); - - } - - } - - public static class JoinReducer extends Reducer { - - public void reduce(CompositeType key, Iterable values, Context context) throws IOException, InterruptedException { - - CardinalityType card; - TripleEntry triple; - CardinalityType subjectCard = null; - CardinalityType objectCard = null; - CardinalityType predicateCard = null; - CardinalityType spCard = null; - CardinalityType soCard = null; - CardinalityType poCard = null; - CardList cList = new CardList((long) 0, (long) 0, (long) 0, (long) 0, (long) 0, (long) 0); - boolean listEmpty = true; - - // System.out.println("********************************************************************"); - // System.out.println("Key is " + key ); - - for (TripleCard val : values) { - - // System.out.println("Value in iterable is " + val); - if (!val.isCardNull()) { - card = val.getCard(); - - if (card.getCardType().toString().equals("object")) { - if (objectCard == null) { - objectCard = new CardinalityType(); - objectCard.set(card); - - } else if (objectCard.compareTo(card) > 0) { - // System.out.println(objectCard.compareTo(card)); - objectCard.set(card); - - } - - } else if (card.getCardType().toString().equals("predicate")) { - // System.out.println("Coming in here?"); - if (predicateCard == null) { - predicateCard = new CardinalityType(); - predicateCard.set(card); - - } else if (predicateCard.compareTo(card) > 0) { - predicateCard.set(card); - - } - } else if 
(card.getCardType().toString().equals("subject")) { - if (subjectCard == null) { - subjectCard = new CardinalityType(); - subjectCard.set(card); - - } else if (subjectCard.compareTo(card) > 0) { - subjectCard.set(card); - } - - } else if (card.getCardType().toString().equals("subjectpredicate")) { - if (spCard == null) { - spCard = new CardinalityType(); - spCard.set(card); - - } else if (spCard.compareTo(card) > 0) { - spCard.set(card); - - } - } else if (card.getCardType().toString().equals("subjectobject")) { - if (soCard == null) { - soCard = new CardinalityType(); - soCard.set(card); - - } else if (soCard.compareTo(card) > 0) { - soCard.set(card); - - } - } else if (card.getCardType().toString().equals("predicateobject")) { - if (poCard == null) { - poCard = new CardinalityType(); - poCard.set(card); - - } else if (poCard.compareTo(card) > 0) { - poCard.set(card); - - } - } - - } else { - - if (listEmpty) { - if (subjectCard != null || predicateCard != null || objectCard != null) { - - if (subjectCard != null) { - cList.setSCard(subjectCard.getCard().get()); - } - if (predicateCard != null) { - cList.setPCard(predicateCard.getCard().get()); - } - if (objectCard != null) { - cList.setOCard(objectCard.getCard().get()); - } - - listEmpty = false; - - } else if (spCard != null || poCard != null || soCard != null) { - - if (spCard != null) { - cList.setSPCard(spCard.getCard().get()); - } - if (poCard != null) { - cList.setPOCard(poCard.getCard().get()); - } - if (soCard != null) { - cList.setSOCard(soCard.getCard().get()); - } - - listEmpty = false; - } - - // System.out.println("Cardlist is " + cList); - // System.out.println("Cards are " + - // subjectCard.getCard() + "," + predicateCard.getCard() - // + - // "," + objectCard.getCard() + "," + spCard.getCard() + - // "," + poCard.getCard() + "," + soCard.getCard()); - // - } - - // only write record if cardList contains at least one - // nonzero entry - if (!val.isTeNull() && !listEmpty) { - - triple = (TripleEntry) val.getTE(); - - context.write(triple, cList); - // System.out.println("Triple is " + triple + - // " and cardinality is " + cList); - - } - - } - } - - } - - } - - public static class JoinSelectPartitioner extends Partitioner { - - @Override - public int getPartition(CompositeType key, TripleCard value, int numPartitions) { - return Math.abs(key.getOldKey().hashCode() * 127) % numPartitions; - } - - } - - public static class JoinSelectGroupComparator extends WritableComparator { - - protected JoinSelectGroupComparator() { - super(CompositeType.class, true); - } - - @SuppressWarnings("rawtypes") - @Override - public int compare(WritableComparable w1, WritableComparable w2) { - CompositeType ct1 = (CompositeType) w1; - CompositeType ct2 = (CompositeType) w2; - return ct1.getOldKey().compareTo(ct2.getOldKey()); - } - - } - - public static class JoinSelectSortComparator extends WritableComparator { - - protected JoinSelectSortComparator() { - super(CompositeType.class, true); - } - - @SuppressWarnings("rawtypes") - @Override - public int compare(WritableComparable w1, WritableComparable w2) { - CompositeType ct1 = (CompositeType) w1; - CompositeType ct2 = (CompositeType) w2; - return ct1.compareTo(ct2); - } - - } - - @Override - public int run(String[] args) throws Exception { - Configuration conf = getConf(); - String inPath1 = conf.get(PROSPECTS_OUTPUTPATH); - String inPath2 = conf.get(SPO_OUTPUTPATH); - String auths = conf.get(AUTHS); - String outPath = conf.get(OUTPUTPATH); - - assert inPath1 != null && inPath2 != null && 
outPath != null; - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - JoinSelectStatsUtil.initJoinMRJob(job, inPath1, inPath2, JoinSelectAggregateMapper.class, outPath, auths); - - job.setSortComparatorClass(JoinSelectSortComparator.class); - job.setGroupingComparatorClass(JoinSelectGroupComparator.class); - job.setPartitionerClass(JoinSelectPartitioner.class); - job.setReducerClass(JoinReducer.class); - job.setNumReduceTasks(32); - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java deleted file mode 100644 index e6a89cee3..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java +++ /dev/null @@ -1,60 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; - -public class JoinSelectDriver extends Configured implements Tool { - - public static void main(String[] args) throws Exception { - ToolRunner.run(new JoinSelectDriver(), args); - } - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - System.out.println("Zookeepers are " + conf.get("zookeepers")); - - int res; - res = ToolRunner.run(conf, new FullTableSize(), args); - - if (res == 0) { - res = ToolRunner.run(conf, new JoinSelectSpoTableOutput(), args); - } - if (res == 0) { - res = ToolRunner.run(conf, new JoinSelectProspectOutput(), args); - } - if (res == 0) { - res = ToolRunner.run(conf, new JoinSelectAggregate(), args); - } - if (res == 0) { - res = ToolRunner.run(conf, new JoinSelectStatisticsSum(), args); - } - - return res; - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java deleted file mode 100644 index a12793d37..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java +++ /dev/null @@ -1,124 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_TABLE; - -import java.io.IOException; -import java.util.regex.Pattern; - -import mvm.rya.joinselect.mr.utils.CardinalityType; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; -import mvm.rya.joinselect.mr.utils.TripleCard; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.util.Tool; - -public class JoinSelectProspectOutput extends Configured implements Tool { - - public static class CardinalityMapper extends Mapper { - - private static final String DELIM = "\u0000"; - - Text inText = new Text(); - Pattern splitPattern = Pattern.compile(DELIM); - - public void map(Key key, Value data, Context context) throws IOException, InterruptedException { - - key.getRow(inText); - String[] cardData = splitPattern.split(inText.toString().trim(), 4); - // System.out.println("Card data is " + cardData[0] + ", "+ cardData[1] + ", "+ cardData[2]); - if (cardData.length == 3 && ((cardData[0].equals("subject")) || (cardData[0].equals("object")) || (cardData[0].equals("predicate")))) { - Text tripleValType = new Text(cardData[0]); - Text cardKey = new Text(cardData[1]); - LongWritable ts = new LongWritable(Long.valueOf(cardData[2])); - - String s = new String(data.get()); - LongWritable card = new LongWritable(Long.parseLong(s)); - - CompositeType cType = new CompositeType(cardKey, new IntWritable(1)); - TripleCard tCard = new TripleCard(new CardinalityType(card, tripleValType, ts)); - - context.write(new CompositeType(cardKey, new IntWritable(1)), new TripleCard(new CardinalityType(card, tripleValType, ts))); - // System.out.println("Card mapper output key is " + cType + " and value is " + tCard ); - - } else if (cardData.length == 4 - && ((cardData[0].equals("subjectpredicate")) || (cardData[0].equals("subjectobject")) || (cardData[0].equals("predicateobject")))) { - - Text tripleValType = new Text(cardData[0]); - Text cardKey = new Text(cardData[1] + DELIM + cardData[2]); - LongWritable ts = new LongWritable(Long.valueOf(cardData[3])); - - String s = new String(data.get()); - LongWritable card = new LongWritable(Long.parseLong(s)); - - CompositeType cType = new CompositeType(cardKey, new IntWritable(1)); - TripleCard tCard = new TripleCard(new CardinalityType(card, 
tripleValType, ts)); - - context.write(new CompositeType(cardKey, new IntWritable(1)), new TripleCard(new CardinalityType(card, tripleValType, ts))); - // System.out.println("Card mapper output key is " + cType + " and value is " + tCard ); - - } - - } - - } - - @Override - public int run(String[] args) throws AccumuloSecurityException, IOException, ClassNotFoundException, InterruptedException { - - Configuration conf = getConf(); - String inTable = conf.get(PROSPECTS_TABLE); - String auths = conf.get(AUTHS); - String outPath = conf.get(PROSPECTS_OUTPUTPATH); - - assert inTable != null && outPath != null; - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - JoinSelectStatsUtil.initTabToSeqFileJob(job, inTable, outPath, auths); - job.setMapperClass(CardinalityMapper.class); - - job.setNumReduceTasks(0); - - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java deleted file mode 100644 index f7b167261..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java +++ /dev/null @@ -1,126 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
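Review note: JoinSelectAggregate's partitioner and comparators, deleted a little further up, form a standard Hadoop secondary sort: partition and group on the natural key (oldKey) only, but sort on the full (oldKey, priority) composite, so priority-1 cardinality records reach the reducer before the priority-2 triple entries that consume them. A dependency-free sketch of that ordering (hypothetical record type):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    final class SecondarySortDemo {
        record Composite(String oldKey, int priority) {}

        public static void main(String[] args) {
            List<Composite> records = new ArrayList<>(List.of(
                new Composite("urn:s1", 2),    // triple entry
                new Composite("urn:s1", 1),    // cardinality record
                new Composite("urn:a0", 2)));
            // Sort comparator: full composite ordering.
            records.sort(Comparator.comparing(Composite::oldKey)
                                   .thenComparingInt(Composite::priority));
            // Grouping on oldKey alone then hands the reducer urn:s1's
            // cardinality record first, followed by its triple entry.
            System.out.println(records);
        }
    }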
- */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_TABLE; - -import java.io.IOException; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; -import mvm.rya.joinselect.mr.utils.TripleCard; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.util.Tool; - -public class JoinSelectSpoTableOutput extends Configured implements Tool { - - public static class JoinSelectMapper extends Mapper { - - private RyaTripleContext ryaContext; - private static final String DELIM = "\u0000"; - - public void map(Key row, Value data, Context context) throws IOException, InterruptedException { - try { - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(context.getConfiguration())); - RyaStatement ryaStatement = ryaContext.deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, new TripleRow(row.getRow().getBytes(), row - .getColumnFamily().getBytes(), row.getColumnQualifier().getBytes(), row.getTimestamp(), row.getColumnVisibility().getBytes(), data.get())); - - Text s = new Text(ryaStatement.getSubject().getData()); - Text p = new Text(ryaStatement.getPredicate().getData()); - Text o = new Text(ryaStatement.getObject().getData()); - Text sp = new Text(ryaStatement.getSubject().getData() + DELIM + ryaStatement.getPredicate().getData()); - Text po = new Text(ryaStatement.getPredicate().getData() + DELIM + ryaStatement.getObject().getData()); - Text so = new Text(ryaStatement.getSubject().getData() + DELIM + ryaStatement.getObject().getData()); - Text ps = new Text(ryaStatement.getPredicate().getData() + DELIM + ryaStatement.getSubject().getData()); - Text op = new Text(ryaStatement.getObject().getData() + DELIM + ryaStatement.getPredicate().getData()); - Text os = new Text(ryaStatement.getObject().getData() + DELIM + ryaStatement.getSubject().getData()); - - TripleEntry t1 = new TripleEntry(s, p, new Text("subject"), new Text("predicate"), new Text("object")); - TripleEntry t2 = new TripleEntry(p, o, new Text("predicate"), new Text("object"), new Text("subject")); - TripleEntry t3 = new TripleEntry(o, s, new Text("object"), new Text("subject"), new Text("predicate")); - TripleEntry t4 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("predicateobject")); - TripleEntry t5 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("objectsubject")); - TripleEntry t6 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("subjectpredicate")); - TripleEntry t7 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("objectpredicate")); - TripleEntry t8 = new 
TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("subjectobject")); - TripleEntry t9 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("predicatesubject")); - - context.write(new CompositeType(o, new IntWritable(2)), new TripleCard(t1)); - context.write(new CompositeType(s, new IntWritable(2)), new TripleCard(t2)); - context.write(new CompositeType(p, new IntWritable(2)), new TripleCard(t3)); - context.write(new CompositeType(po, new IntWritable(2)), new TripleCard(t4)); - context.write(new CompositeType(so, new IntWritable(2)), new TripleCard(t5)); - context.write(new CompositeType(sp, new IntWritable(2)), new TripleCard(t6)); - context.write(new CompositeType(op, new IntWritable(2)), new TripleCard(t7)); - context.write(new CompositeType(os, new IntWritable(2)), new TripleCard(t8)); - context.write(new CompositeType(ps, new IntWritable(2)), new TripleCard(t9)); - - } catch (TripleRowResolverException e) { - e.printStackTrace(); - } - - } - - } - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - String inTable = conf.get(SPO_TABLE); - String auths = conf.get(AUTHS); - String outPath = conf.get(SPO_OUTPUTPATH); - - assert inTable != null && outPath != null; - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - JoinSelectStatsUtil.initTabToSeqFileJob(job, inTable, outPath, auths); - job.setMapperClass(JoinSelectMapper.class); - job.setNumReduceTasks(0); - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java deleted file mode 100644 index ef271ffb3..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java +++ /dev/null @@ -1,220 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
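Review note: the nine context.write calls above implement a key fan-out: every triple is re-emitted under each single position and each ordered pair of positions, so the aggregation job can meet it against cardinality records sharing that key. The key material, extracted into a hypothetical helper:

    import java.util.List;

    final class TripleKeyFanOut {
        private static final String DELIM = "\u0000";

        static List<String> keys(String s, String p, String o) {
            return List.of(
                s, p, o,                                       // single positions
                s + DELIM + p, p + DELIM + o, s + DELIM + o,   // forward pairs
                p + DELIM + s, o + DELIM + p, o + DELIM + s);  // reversed pairs
        }
    }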
- */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SELECTIVITY_TABLE; - -import java.io.IOException; - -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.Tool; - -public class JoinSelectStatisticsSum extends Configured implements Tool { - - // TODO need to tweak this class to compute join cardinalities over more than one variable - - public static class CardinalityIdentityMapper extends Mapper { - - public void map(TripleEntry key, CardList value, Context context) throws IOException, InterruptedException { - - // System.out.println("Keys are " + key + " and values are " + value); - - if (key.getSecond().toString().length() != 0 && key.getSecondPos().toString().length() != 0) { - TripleEntry te1 = new TripleEntry(key.getFirst(), new Text(""), key.getFirstPos(), new Text(""), key.getKeyPos()); - TripleEntry te2 = new TripleEntry(key.getSecond(), new Text(""), key.getSecondPos(), new Text(""), key.getKeyPos()); - - context.write(te1, value); - context.write(te2, value); - context.write(key, value); - // System.out.println("Output key values from mapper are " + te1 + " and " + value + "\n" - // + te2 + " and " + value + "\n" + key + " and " + value + "\n"); - } else if (key.getSecond().toString().length() == 0 && key.getSecondPos().toString().length() == 0) { - - context.write(key, value); - // System.out.println("Output key values from mapper are " + "\n" + key + " and " + value + "\n" + "\n"); - } - - } - - } - - public static class CardinalityIdentityReducer extends Reducer { - - private static final String DELIM = "\u0000"; - - public void reduce(TripleEntry te, Iterable values, Context context) throws IOException, InterruptedException { - - CardList cl = new CardList(); - LongWritable s = new LongWritable(0); - LongWritable p = new LongWritable(0); - LongWritable o = new LongWritable(0); - LongWritable sp = new LongWritable(0); - LongWritable po = new LongWritable(0); - LongWritable so = new LongWritable(0); - - // System.out.println("***********************************************************\n" - // + "key is " + te); - - for (CardList val : values) { - // System.out.println("Value is " + val); - s.set(s.get() + val.getcardS().get()); - p.set(p.get() + val.getcardP().get()); - o.set(o.get() + val.getcardO().get()); - sp.set(sp.get() + val.getcardSP().get()); - po.set(po.get() + val.getcardPO().get()); - so.set(so.get() + val.getcardSO().get()); - } - cl.setCard(s, p, o, sp, po, so); - - Text row; - - if (te.getSecond().toString().length() > 0) { - row = new Text(te.getFirstPos().toString() + te.getSecondPos().toString() + DELIM + te.getFirst().toString() + DELIM + te.getSecond()); - } else { - row = new Text(te.getFirstPos().toString() + DELIM + te.getFirst().toString()); - } - - Mutation m1, m2, m3; - - if 
(te.getKeyPos().toString().equals("subject") || te.getKeyPos().toString().equals("predicate") || te.getKeyPos().toString().equals("object")) { - m1 = new Mutation(row); - m1.put(new Text(te.getKeyPos().toString() + "subject"), new Text(cl.getcardS().toString()), new Value(new byte[0])); - m2 = new Mutation(row); - m2.put(new Text(te.getKeyPos().toString() + "predicate"), new Text(cl.getcardP().toString()), new Value(new byte[0])); - m3 = new Mutation(row); - m3.put(new Text(te.getKeyPos().toString() + "object"), new Text(cl.getcardO().toString()), new Value(new byte[0])); - - } else if (te.getKeyPos().toString().equals("predicatesubject") || te.getKeyPos().toString().equals("objectpredicate") - || te.getKeyPos().toString().equals("subjectobject")) { - - String jOrder = reverseJoinOrder(te.getKeyPos().toString()); - - m1 = new Mutation(row); - m1.put(new Text(jOrder + "predicatesubject"), new Text(cl.getcardSP().toString()), new Value(new byte[0])); - m2 = new Mutation(row); - m2.put(new Text(jOrder + "objectpredicate"), new Text(cl.getcardPO().toString()), new Value(new byte[0])); - m3 = new Mutation(row); - m3.put(new Text(jOrder + "subjectobject"), new Text(cl.getcardSO().toString()), new Value(new byte[0])); - - } else { - - m1 = new Mutation(row); - m1.put(new Text(te.getKeyPos().toString() + "subjectpredicate"), new Text(cl.getcardSP().toString()), new Value(new byte[0])); - m2 = new Mutation(row); - m2.put(new Text(te.getKeyPos().toString() + "predicateobject"), new Text(cl.getcardPO().toString()), new Value(new byte[0])); - m3 = new Mutation(row); - m3.put(new Text(te.getKeyPos().toString() + "objectsubject"), new Text(cl.getcardSO().toString()), new Value(new byte[0])); - - } - - // TODO add the appropriate table name here - context.write(new Text(""), m1); - context.write(new Text(""), m2); - context.write(new Text(""), m3); - } - - private String reverseJoinOrder(String s) { - - if (s.equals("predicatesubject")) { - return "subjectpredicate"; - } else if (s.equals("objectpredicate")) { - return "predicateobject"; - } else if (s.equals("subjectobject")) { - return "objectsubject"; - } else { - throw new IllegalArgumentException("Invalid join type."); - } - - } - - } - - public static class CardinalityIdentityCombiner extends Reducer { - - @Override - public void reduce(TripleEntry key, Iterable values, Context context) throws IOException, InterruptedException { - - CardList cl = new CardList(); - LongWritable s = new LongWritable(0); - LongWritable p = new LongWritable(0); - LongWritable o = new LongWritable(0); - LongWritable sp = new LongWritable(0); - LongWritable po = new LongWritable(0); - LongWritable so = new LongWritable(0); - - for (CardList val : values) { - s.set(s.get() + val.getcardS().get()); - p.set(p.get() + val.getcardP().get()); - o.set(o.get() + val.getcardO().get()); - sp.set(sp.get() + val.getcardSP().get()); - po.set(po.get() + val.getcardPO().get()); - so.set(so.get() + val.getcardSO().get()); - } - - cl.setCard(s, p, o, sp, po, so); - context.write(key, cl); - - } - - } - - @Override - public int run(String[] args) throws AccumuloSecurityException, IOException, ClassNotFoundException, InterruptedException { - - Configuration conf = getConf(); - String outTable = conf.get(SELECTIVITY_TABLE); - String auths = conf.get(AUTHS); - String inPath = conf.get(INPUTPATH); - - assert outTable != null && inPath != null; - - Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - 
JoinSelectStatsUtil.initSumMRJob(job, inPath, outTable, auths); - - job.setMapperClass(CardinalityIdentityMapper.class); - job.setCombinerClass(CardinalityIdentityCombiner.class); - job.setReducerClass(CardinalityIdentityReducer.class); - job.setNumReduceTasks(32); - - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java deleted file mode 100644 index b8fd2746a..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java +++ /dev/null @@ -1,209 +0,0 @@ -package mvm.rya.joinselect.mr.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.WritableComparable; - -public class CardList implements WritableComparable { - - private LongWritable cardS; - private LongWritable cardP; - private LongWritable cardO; - private LongWritable cardSP; - private LongWritable cardPO; - private LongWritable cardSO; - - public CardList() { - cardS = new LongWritable(); - cardP = new LongWritable(); - cardO = new LongWritable(); - cardSP = new LongWritable(); - cardSO = new LongWritable(); - cardPO = new LongWritable(); - - } - - public CardList(long cardS, long cardP, long cardO, long cardSP, long cardPO, long cardSO) { - this.cardS = new LongWritable(cardS); - this.cardP = new LongWritable(cardP); - this.cardO = new LongWritable(cardO); - this.cardSP = new LongWritable(cardSP); - this.cardSO = new LongWritable(cardSO); - this.cardPO = new LongWritable(cardPO); - } - - public CardList(LongWritable cardS, LongWritable cardP, LongWritable cardO, LongWritable cardSP, LongWritable cardPO, LongWritable cardSO) { - - this.cardS = cardS; - this.cardP = cardP; - this.cardO = cardO; - this.cardSP = cardSP; - this.cardPO = cardPO; - this.cardSO = cardSO; - - } - - public void setCard(LongWritable cardS, LongWritable cardP, LongWritable cardO, LongWritable cardSP, LongWritable cardPO, LongWritable cardSO) { - this.cardS = cardS; - this.cardP = cardP; - this.cardO = cardO; - this.cardSP = cardSP; - this.cardPO = cardPO; - this.cardSO = cardSO; - - } - - public void setSCard(long cardS) { - this.cardS = new LongWritable(cardS); - } - - public void setPCard(long cardP) { - this.cardP = new LongWritable(cardP); - } - - public void setOCard(long cardO) { - this.cardO = new LongWritable(cardO); - } - - public void setSPCard(long cardSP) { - this.cardSP = new LongWritable(cardSP); - } - - public void setSOCard(long cardSO) { - this.cardSO = new LongWritable(cardSO); - } - - public 
void setPOCard(long cardPO) { - this.cardPO = new LongWritable(cardPO); - } - - public LongWritable getcardS() { - return this.cardS; - } - - public LongWritable getcardP() { - return this.cardP; - } - - public LongWritable getcardO() { - return this.cardO; - } - - public LongWritable getcardPO() { - return this.cardPO; - } - - public LongWritable getcardSO() { - return this.cardSO; - } - - public LongWritable getcardSP() { - return this.cardSP; - } - - @Override - public void write(DataOutput out) throws IOException { - cardS.write(out); - cardP.write(out); - cardO.write(out); - cardSO.write(out); - cardPO.write(out); - cardSP.write(out); - - } - - @Override - public void readFields(DataInput in) throws IOException { - cardS.readFields(in); - cardP.readFields(in); - cardO.readFields(in); - cardSO.readFields(in); - cardPO.readFields(in); - cardSP.readFields(in); - - } - - @Override - public int hashCode() { - int result = 7; - result = result * 17 + cardS.hashCode(); - result = result * 17 + cardP.hashCode(); - result = result * 17 + cardO.hashCode(); - result = result * 17 + cardSP.hashCode(); - result = result * 17 + cardPO.hashCode(); - result = result * 17 + cardSO.hashCode(); - - return result; - - } - - @Override - public boolean equals(Object o) { - if (o instanceof CardList) { - CardList comp = (CardList) o; - return cardS.equals(comp.cardS) && cardP.equals(comp.cardP) && cardO.equals(comp.cardO) && cardSP.equals(comp.cardSP) && cardSO.equals(comp.cardSO) - && cardPO.equals(comp.cardPO); - - } - return false; - } - - @Override - public String toString() { - return cardS + "\t" + cardP + "\t" + cardO + "\t" + cardSP + "\t" + cardPO + "\t" + cardSO; - - } - - @Override - public int compareTo(CardList o) { - - int cmp = cardS.compareTo(o.cardS); - if (cmp != 0) { - return cmp; - } - cmp = cardP.compareTo(o.cardP); - if (cmp != 0) { - return cmp; - } - cmp = cardO.compareTo(o.cardO); - if (cmp != 0) { - return cmp; - } - cmp = cardSP.compareTo(o.cardSP); - if (cmp != 0) { - return cmp; - } - cmp = cardPO.compareTo(o.cardPO); - if (cmp != 0) { - return cmp; - } - - return cardSO.compareTo(o.cardSO); - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java deleted file mode 100644 index 924f596b4..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java +++ /dev/null @@ -1,149 +0,0 @@ -package mvm.rya.joinselect.mr.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
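Review note: hand-rolled Writables like CardList must read fields back in exactly the order write() emitted them (S, P, O, SO, PO, SP here). A throwaway round-trip harness of the kind that keeps such classes honest, using the constructor from the deleted code:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import mvm.rya.joinselect.mr.utils.CardList;

    final class CardListRoundTrip {
        public static void main(String[] args) throws IOException {
            CardList before = new CardList(1, 2, 3, 4, 5, 6);
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            before.write(new DataOutputStream(buf));
            CardList after = new CardList();
            after.readFields(new DataInputStream(
                new ByteArrayInputStream(buf.toByteArray())));
            System.out.println(before.equals(after)); // expect: true
        }
    }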
- */ - - - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.WritableComparable; - -public class CardinalityType implements WritableComparable { - - private LongWritable card; - private Text cardType; - private LongWritable ts; - - public CardinalityType() { - card = new LongWritable(); - cardType = new Text(); - ts = new LongWritable(); - } - - public CardinalityType(int card, String cardType, long ts) { - - this.card = new LongWritable(card); - this.cardType = new Text(cardType); - this.ts = new LongWritable(ts); - - } - - public CardinalityType(LongWritable card, Text cardType, LongWritable ts) { - - this.card = card; - this.ts = ts; - this.cardType = cardType; - - } - - public void set(CardinalityType ct) { - this.card.set(ct.card.get()); - this.ts.set(ct.ts.get()); - this.cardType.set(ct.cardType); - } - - public void setCard(LongWritable card) { - this.card = card; - - } - - public void setCardType(Text cardType) { - this.cardType = cardType; - } - - public void setTS(LongWritable ts) { - this.ts = ts; - } - - public LongWritable getCard() { - return this.card; - } - - public Text getCardType() { - return this.cardType; - } - - public LongWritable getTS() { - return this.ts; - } - - @Override - public void write(DataOutput out) throws IOException { - card.write(out); - cardType.write(out); - ts.write(out); - - } - - @Override - public void readFields(DataInput in) throws IOException { - card.readFields(in); - cardType.readFields(in); - ts.readFields(in); - - } - - @Override - public int hashCode() { - int result = 7; - result = result * 17 + card.hashCode(); - result = result * 17 + cardType.hashCode(); - result = result * 17 + ts.hashCode(); - - return result; - - } - - @Override - public boolean equals(Object o) { - if (o instanceof CardinalityType) { - CardinalityType trip = (CardinalityType) o; - return card.equals(trip.card) && cardType.equals(trip.cardType) && ts.equals(trip.ts); - - } - return false; - } - - @Override - public String toString() { - return card + " " + cardType + " " + ts; - - } - - @Override - public int compareTo(CardinalityType o) { - - int cmp = cardType.compareTo(o.cardType); - if (cmp != 0) { - return cmp; - } - cmp = ts.compareTo(o.ts); - if (cmp != 0) { - return cmp; - } - return card.compareTo(o.card); - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java deleted file mode 100644 index 57e6ee2e2..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java +++ /dev/null @@ -1,122 +0,0 @@ -package mvm.rya.joinselect.mr.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.WritableComparable; - -public class CompositeType implements WritableComparable { - - private Text oldKey; - private IntWritable priority; - - public CompositeType() { - oldKey = new Text(); - priority = new IntWritable(); - } - - public CompositeType(String oldKey, int priority) { - this.oldKey = new Text(oldKey); - this.priority = new IntWritable(priority); - } - - public CompositeType(Text oldKey, IntWritable priority) { - - this.oldKey = oldKey; - this.priority = priority; - - } - - public void setOldKey(Text oldKey) { - this.oldKey = oldKey; - - } - - public void setPriority(IntWritable priority) { - this.priority = priority; - } - - public Text getOldKey() { - return this.oldKey; - } - - public IntWritable getPriority() { - return this.priority; - } - - @Override - public void write(DataOutput out) throws IOException { - oldKey.write(out); - priority.write(out); - - } - - @Override - public void readFields(DataInput in) throws IOException { - oldKey.readFields(in); - priority.readFields(in); - - } - - @Override - public int hashCode() { - int result = 7; - result = result * 17 + oldKey.hashCode(); - // result = result*17+ priority.hashCode(); - - return result; - - } - - @Override - public boolean equals(Object o) { - if (o instanceof CompositeType) { - CompositeType comp = (CompositeType) o; - return oldKey.equals(comp.oldKey) && priority.equals(comp.priority); - - } - return false; - } - - @Override - public String toString() { - return oldKey + "\t" + priority; - - } - - @Override - public int compareTo(CompositeType o) { - int compare = getOldKey().compareTo(o.getOldKey()); - if (compare != 0) { - return compare; - } - - return getPriority().compareTo(o.getPriority()); - - } - -} diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java deleted file mode 100644 index 3ec34d07e..000000000 --- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java +++ /dev/null @@ -1,46 +0,0 @@ -package mvm.rya.joinselect.mr.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
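Review note: CompositeType above hashes on oldKey alone (the priority term is commented out) while equals() compares both fields. That is still a consistent equals/hashCode pair, and it means records differing only in priority share a hash bucket, echoing the group-by-natural-key behavior of the aggregation job:

    import mvm.rya.joinselect.mr.utils.CompositeType;

    final class CompositeTypeDemo {
        public static void main(String[] args) {
            CompositeType card = new CompositeType("urn:s1", 1);
            CompositeType triple = new CompositeType("urn:s1", 2);
            System.out.println(card.hashCode() == triple.hashCode()); // true
            System.out.println(card.equals(triple));                  // false
        }
    }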
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
deleted file mode 100644
index 3ec34d07e..000000000
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package mvm.rya.joinselect.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-public class JoinSelectConstants {
-
-  public static final String COUNT = "count";
-  public static final String METADATA = "metadata";
-  public static final byte[] EMPTY = new byte[0];
-
-  // config properties
-  public static final String PERFORMANT = "performant";
-  public static final String USERNAME = "username";
-  public static final String PASSWORD = "password";
-  public static final String INSTANCE = "instance";
-  public static final String ZOOKEEPERS = "zookeepers";
-  public static final String INPUTPATH = "inputpath";
-  public static final String OUTPUTPATH = "outputpath";
-  public static final String PROSPECTS_OUTPUTPATH = "prospects.outputpath";
-  public static final String SPO_OUTPUTPATH = "spo.outputpath";
-  public static final String AUTHS = "auths";
-  public static final String PROSPECTS_TABLE = "prospects.table";
-  public static final String SPO_TABLE = "spo.table";
-  public static final String SELECTIVITY_TABLE = "selectivity.table";
-  public static final String MOCK = "mock";
-
-}
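
These constants are plain Hadoop Configuration keys consumed by the MR utility methods that follow. A sketch of priming a job configuration with them (all values illustrative):

    import org.apache.hadoop.conf.Configuration;

    import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.*;

    public class JoinSelectConfDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(USERNAME, "root");                  // illustrative credentials
        conf.set(PASSWORD, "secret");
        conf.set(INSTANCE, "accumulo");
        conf.set(ZOOKEEPERS, "zoo1:2181,zoo2:2181"); // required: the init methods below fail fast without it
        conf.set(AUTHS, "U,FOUO");
        conf.set(PROSPECTS_TABLE, "rya_prospects");
        conf.set(SELECTIVITY_TABLE, "rya_selectivity");
      }
    }
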
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
deleted file mode 100644
index cf8db40e7..000000000
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
+++ /dev/null
@@ -1,183 +0,0 @@
-package mvm.rya.joinselect.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV;
-import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;
-import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INSTANCE;
-import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PASSWORD;
-import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.USERNAME;
-import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.ZOOKEEPERS;
-
-import java.io.IOException;
-
-import mvm.rya.api.resolver.triple.TripleRow;
-
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.security.ColumnVisibility;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-
-public class JoinSelectStatsUtil {
-
-  public static void initSumMRJob(Job job, String inputPath, String outtable, String auths) throws AccumuloSecurityException, IOException {
-    Configuration conf = job.getConfiguration();
-    String username = conf.get(USERNAME);
-    String password = conf.get(PASSWORD);
-    String instance = conf.get(INSTANCE);
-    String zookeepers = conf.get(ZOOKEEPERS);
-
-    if (zookeepers != null) {
-      AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password));
-      AccumuloOutputFormat.setZooKeeperInstance(job, instance, zookeepers);
-    } else {
-      throw new IllegalArgumentException("Must specify zookeepers");
-    }
-
-    SequenceFileInputFormat.addInputPath(job, new Path(inputPath));
-    job.setInputFormatClass(SequenceFileInputFormat.class);
-    job.setMapOutputKeyClass(TripleEntry.class);
-    job.setMapOutputValueClass(CardList.class);
-
-    AccumuloOutputFormat.setDefaultTableName(job, outtable);
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(Mutation.class);
-
-  }
-
-  public static void initTableMRJob(Job job, String intable, String outtable, String auths) throws AccumuloSecurityException {
-    Configuration conf = job.getConfiguration();
-    String username = conf.get(USERNAME);
-    String password = conf.get(PASSWORD);
-    String instance = conf.get(INSTANCE);
-    String zookeepers = conf.get(ZOOKEEPERS);
-
-    System.out.println("Zookeepers are " + auths);
-
-    if (zookeepers != null) {
-      AccumuloInputFormat.setZooKeeperInstance(job, instance, zookeepers);
-      AccumuloOutputFormat.setZooKeeperInstance(job, instance, zookeepers);
-    } else {
-      throw new IllegalArgumentException("Must specify either mock or zookeepers");
-    }
-
-    AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password));
-    AccumuloInputFormat.setScanAuthorizations(job, new Authorizations(auths));
-    AccumuloInputFormat.setInputTableName(job, intable);
-    job.setInputFormatClass(AccumuloInputFormat.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(IntWritable.class);
-
-    // OUTPUT
-    AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password));
-    AccumuloOutputFormat.setDefaultTableName(job, outtable);
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(Mutation.class);
-
-  }
-
-  public static void initTabToSeqFileJob(Job job, String intable, String outpath, String auths) throws AccumuloSecurityException {
-
-    Configuration conf = job.getConfiguration();
-    String username = conf.get(USERNAME);
-    String password = conf.get(PASSWORD);
-    String instance = conf.get(INSTANCE);
-    String zookeepers = conf.get(ZOOKEEPERS);
-
-    System.out.println("Zookeepers are " + auths);
-
-    if (zookeepers != null) {
-      AccumuloInputFormat.setZooKeeperInstance(job, instance, zookeepers);
-    } else {
-      throw new IllegalArgumentException("Must specify either mock or zookeepers");
-    }
-
-    AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password));
-    AccumuloInputFormat.setScanAuthorizations(job, new Authorizations(auths));
-    AccumuloInputFormat.setInputTableName(job, intable);
-    job.setInputFormatClass(AccumuloInputFormat.class);
-    job.setMapOutputKeyClass(CompositeType.class);
-    job.setMapOutputValueClass(TripleCard.class);
-
-    // OUTPUT
-    SequenceFileOutputFormat.setOutputPath(job, new Path(outpath));
-    job.setOutputFormatClass(SequenceFileOutputFormat.class);
-    job.setOutputKeyClass(CompositeType.class);
-    job.setOutputValueClass(TripleCard.class);
-
-  }
-
-  public static void initJoinMRJob(Job job, String prospectsPath, String spoPath,
-      Class<? extends Mapper<CompositeType,TripleCard,CompositeType,TripleCard>> mapperClass,
-      String outPath, String auths) throws AccumuloSecurityException {
-
-    MultipleInputs.addInputPath(job, new Path(prospectsPath), SequenceFileInputFormat.class, mapperClass);
-    MultipleInputs.addInputPath(job, new Path(spoPath), SequenceFileInputFormat.class, mapperClass);
-    job.setMapOutputKeyClass(CompositeType.class);
-    job.setMapOutputValueClass(TripleCard.class);
-
-    SequenceFileOutputFormat.setOutputPath(job, new Path(outPath));
-    job.setOutputFormatClass(SequenceFileOutputFormat.class);
-    job.setOutputKeyClass(TripleEntry.class);
-    job.setOutputValueClass(CardList.class);
-
-  }
-
-  public static Mutation createMutation(TripleRow tripleRow) {
-    Mutation mutation = new Mutation(new Text(tripleRow.getRow()));
-    byte[] columnVisibility = tripleRow.getColumnVisibility();
-    ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility);
-    Long timestamp = tripleRow.getTimestamp();
-    boolean hasts = timestamp != null;
-    timestamp = timestamp == null ? 0l : timestamp;
-    byte[] value = tripleRow.getValue();
-    Value v = value == null ? EMPTY_VALUE : new Value(value);
-    byte[] columnQualifier = tripleRow.getColumnQualifier();
-    Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier);
-    byte[] columnFamily = tripleRow.getColumnFamily();
-    Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily);
-
-    if (hasts) {
-      mutation.put(cfText, cqText, cv, timestamp, v);
-    } else {
-      mutation.put(cfText, cqText, cv, v);
-
-    }
-    return mutation;
-  }
-
-}
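
Note that createMutation() propagates a TripleRow timestamp only when one is present, otherwise letting Accumulo assign server time; that is why it switches between the two Mutation.put overloads. With the configuration keys in place, wiring a job through these helpers is then roughly one line per stage; a hedged sketch (Hadoop 2 style, paths and table names illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil;

    public class SumJobDriver {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // assumed to carry the JoinSelectConstants keys
        Job job = Job.getInstance(conf, "joinselect-sum");
        job.setJarByClass(SumJobDriver.class);

        // Reads (TripleEntry, CardList) sequence files and writes Mutations
        // to the selectivity table.
        JoinSelectStatsUtil.initSumMRJob(job, "/rya/joinselect/sum_input", "rya_selectivity", "U,FOUO");

        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }
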
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
deleted file mode 100644
index 467f7545b..000000000
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package mvm.rya.joinselect.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.WritableComparable;
-
-public class TripleCard implements WritableComparable<TripleCard> {
-
-  private CardinalityType card = null;
-  private TripleEntry te = null;
-
-  private CardinalityType tempCard = new CardinalityType();
-  private TripleEntry tempTe = new TripleEntry();
-
-  public TripleCard() {}
-
-  public TripleCard(CardinalityType card) {
-    this.setCard(card);
-  }
-
-  public TripleCard(TripleEntry te) {
-    this.setTE(te);
-  }
-
-  public void setCard(CardinalityType card) {
-    tempCard.set(card);
-    this.card = tempCard;
-    this.te = null;
-  }
-
-  public void setTE(TripleEntry te) {
-    tempTe.setTE(te);
-    this.te = tempTe;
-    this.card = null;
-  }
-
-  public CardinalityType getCard() {
-    return this.card;
-  }
-
-  public TripleEntry getTE() {
-    return this.te;
-  }
-
-  public boolean isCardNull() {
-    return (card == null);
-  }
-
-  public boolean isTeNull() {
-    return (te == null);
-  }
-
-  @Override
-  public void write(DataOutput out) throws IOException {
-    if (card != null) {
-      out.writeBoolean(true);
-      card.write(out);
-    } else {
-      out.writeBoolean(false);
-      te.write(out);
-    }
-
-  }
-
-  @Override
-  public void readFields(DataInput in) throws IOException {
-    if (in.readBoolean()) {
-      tempCard.readFields(in);
-      card = tempCard;
-      te = null;
-    } else {
-      tempTe.readFields(in);
-      te = tempTe;
-      card = null;
-    }
-  }
-
-  @Override
-  public int hashCode() {
-    int result = 7;
-    if (card != null) {
-      result = result * 17 + card.hashCode();
-    } else {
-      result = result * 17 + te.hashCode();
-    }
-    return result;
-
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o instanceof TripleCard) {
-      TripleCard comp = (TripleCard) o;
-      if (card != null) {
-        return card.equals(comp.card);
-      } else {
-        return te.equals(comp.te);
-      }
-    }
-    return false;
-  }
-
-  @Override
-  public String toString() {
-    if (card != null) {
-      return card.toString();
-    } else {
-      return te.toString();
-    }
-  }
-
-  @Override
-  public int compareTo(TripleCard o) {
-
-    if (card != null) {
-      return card.compareTo(o.card);
-    } else {
-      return te.compareTo(o.te);
-    }
-  }
-
-}
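
TripleCard is a tagged union: write() emits a boolean discriminant followed by exactly one of the two payloads, and readFields() uses that flag to pick the branch to decode (the same idea Hadoop's GenericWritable generalizes). A round-trip sketch of both branches, assuming the removed classes are available (the demo class itself is hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class TripleCardDemo {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        // One record per branch of the union.
        new TripleCard(new CardinalityType(5, "subject", 1L)).write(out);
        new TripleCard(new TripleEntry("s", "p", "subject", "predicate", "object")).write(out);

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        TripleCard card = new TripleCard();

        card.readFields(in);
        System.out.println(card.isCardNull() ? "triple" : "cardinality"); // cardinality
        card.readFields(in);
        System.out.println(card.isTeNull() ? "cardinality" : "triple");   // triple
      }
    }
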
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
deleted file mode 100644
index 7deb346ee..000000000
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
+++ /dev/null
@@ -1,180 +0,0 @@
-package mvm.rya.joinselect.mr.utils;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-
-public class TripleEntry implements WritableComparable<TripleEntry> {
-
-  private Text first;
-  private Text second;
-  private Text firstPos;
-  private Text secondPos;
-  private Text keyPos;
-
-  public TripleEntry() {
-
-    first = new Text();
-    second = new Text();
-    firstPos = new Text();
-    secondPos = new Text();
-    keyPos = new Text();
-
-  }
-
-  public TripleEntry(String first, String second, String firstPos, String secondPos, String keyPos) {
-    this.first = new Text(first);
-    this.second = new Text(second);
-    this.firstPos = new Text(firstPos);
-    this.secondPos = new Text(secondPos);
-    this.keyPos = new Text(keyPos);
-  }
-
-  public TripleEntry(Text first, Text second, Text firstPos, Text secondPos, Text keyPos) {
-    this.first = first;
-    this.second = second;
-    this.firstPos = firstPos;
-    this.secondPos = secondPos;
-    this.keyPos = keyPos;
-  }
-
-  public void setEntry(Text first, Text second) {
-    this.first = first;
-    this.second = second;
-  }
-
-  public void setPosition(Text firstPos, Text secondPos, Text keyPos) {
-    this.firstPos = firstPos;
-    this.secondPos = secondPos;
-    this.keyPos = keyPos;
-  }
-
-  public void setTE(TripleEntry te) {
-
-    this.first.set(te.first);
-    this.second.set(te.second);
-    this.firstPos.set(te.firstPos);
-    this.secondPos.set(te.secondPos);
-    this.keyPos.set(te.keyPos);
-
-  }
-
-  public Text getFirst() {
-    return this.first;
-  }
-
-  public Text getSecond() {
-    return this.second;
-  }
-
-  public Text getFirstPos() {
-    return this.firstPos;
-  }
-
-  public Text getSecondPos() {
-    return this.secondPos;
-  }
-
-  public Text getKeyPos() {
-    return this.keyPos;
-  }
-
-  @Override
-  public void write(DataOutput out) throws IOException {
-    first.write(out);
-    second.write(out);
-    firstPos.write(out);
-    secondPos.write(out);
-    keyPos.write(out);
-
-  }
-
-  @Override
-  public void readFields(DataInput in) throws IOException {
-    first.readFields(in);
-    second.readFields(in);
-    firstPos.readFields(in);
-    secondPos.readFields(in);
-    keyPos.readFields(in);
-
-  }
-
-  @Override
-  public int hashCode() {
-    int result = 7;
-    result = result * 17 + first.hashCode();
-    result = result * 17 + 
second.hashCode(); - result = result * 17 + firstPos.hashCode(); - result = result * 17 + secondPos.hashCode(); - result = result * 17 + keyPos.hashCode(); - - return result; - - } - - @Override - public boolean equals(Object o) { - if (o instanceof TripleEntry) { - TripleEntry trip = (TripleEntry) o; - return first.equals(trip.first) && second.equals(trip.second) && firstPos.equals(trip.firstPos) && secondPos.equals(trip.secondPos) - && keyPos.equals(trip.keyPos); - - } - return false; - } - - @Override - public String toString() { - return first + "\t" + firstPos + "\t" + second + "\t" + secondPos + "\t" + keyPos; - - } - - @Override - public int compareTo(TripleEntry o) { - - int cmp = first.compareTo(o.first); - if (cmp != 0) { - return cmp; - } - cmp = firstPos.compareTo(o.firstPos); - if (cmp != 0) { - return cmp; - } - cmp = second.compareTo(o.second); - if (cmp != 0) { - return cmp; - } - - cmp = secondPos.compareTo(o.secondPos); - if (cmp != 0) { - return cmp; - } - return keyPos.compareTo(o.keyPos); - - } - -} diff --git a/extras/rya.prospector/src/main/resources/META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan b/extras/rya.prospector/src/main/resources/META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan deleted file mode 100644 index 38258c1ca..000000000 --- a/extras/rya.prospector/src/main/resources/META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan +++ /dev/null @@ -1 +0,0 @@ -mvm.rya.prospector.plans.impl.CountPlan \ No newline at end of file diff --git a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy deleted file mode 100644 index 766a239e5..000000000 --- a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package mvm.rya.prospector.mr - -import com.google.common.collect.Iterators -import com.google.common.collect.Lists -import mvm.rya.accumulo.AccumuloRyaDAO -import mvm.rya.accumulo.AccumuloRdfConfiguration -import mvm.rya.api.persist.RdfEvalStatsDAO -import mvm.rya.api.domain.RyaStatement -import mvm.rya.api.domain.RyaType -import mvm.rya.api.domain.RyaURI -import mvm.rya.prospector.domain.IndexEntry -import mvm.rya.prospector.domain.TripleValueType -import mvm.rya.prospector.service.ProspectorService -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO -import mvm.rya.prospector.utils.ProspectorConstants -import org.apache.accumulo.core.client.Instance -import org.apache.accumulo.core.client.mock.MockInstance -import org.apache.accumulo.core.security.Authorizations -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.fs.Path -import org.apache.hadoop.util.ToolRunner -import org.junit.Test -import org.openrdf.model.vocabulary.XMLSchema -import org.openrdf.model.impl.URIImpl - -import static org.junit.Assert.assertEquals -import org.openrdf.model.impl.LiteralImpl -import org.openrdf.model.Value - -/** - * Date: 12/4/12 - * Time: 4:33 PM - */ -class ProspectorTest { - - @Test - public void testCount() throws Exception { - - Instance mock = new MockInstance("accumulo"); - - def connector = mock.getConnector("user", "pass".bytes) - def intable = "rya_spo" - def outtable = "rya_prospects" - if (connector.tableOperations().exists(outtable)) - connector.tableOperations().delete(outtable) - connector.tableOperations().create(outtable) - - AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - ryaDAO.init() - - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata1"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata2"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred"), new RyaType(XMLSchema.INTEGER, "12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred1"), new RyaType("12"))) - - def confFile = "stats_cluster_config.xml" - def confPath = new Path(getClass().getClassLoader().getResource(confFile).toString()) - def args = (String[]) [confPath]; - ToolRunner.run(new Prospector(), args); - debugTable(connector, outtable) - - def scanner = connector.createScanner(outtable, new Authorizations("U", "FOUO")) - def iter = scanner.iterator() -// assertEquals(11, Iterators.size(iter)) - - ryaDAO.destroy() - - def conf = new Configuration() - conf.addResource(confPath) - // debugTable(mrInfo, outtable) - - def service = new ProspectorService(connector, outtable) - def auths = (String[]) ["U", "FOUO"] - def prospects = service.getProspects(auths) - def plist = Lists.newArrayList(prospects) - assertEquals(1, plist.size()) - - def rdfConf = new AccumuloRdfConfiguration(conf) - rdfConf.setAuths("U","FOUO") - - prospects = service.getProspectsInRange(System.currentTimeMillis() - 100000, System.currentTimeMillis() + 10000, auths) - plist = Lists.newArrayList(prospects) - assertEquals(1, plist.size()) - - List queryTerms = new ArrayList(); - queryTerms.add("urn:gem:etype"); - def query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.entity.name(), queryTerms, XMLSchema.ANYURI.stringValue(), auths) - assertEquals(1, 
query.size()) -// assertEquals( -// new IndexEntry(index: ProspectorConstants.COUNT, data: "urn:gem:etype", dataType: XMLSchema.ANYURI.stringValue(), -// tripleValueType: TripleValueType.entity, visibility: "", count: -1, timestamp: plist.get(0)), -// query.get(0)) - - queryTerms = new ArrayList(); - queryTerms.add("urn:gem:etype#1234"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.subject.name(), queryTerms, XMLSchema.ANYURI.stringValue(), auths) - assertEquals(1, query.size()) - - queryTerms = new ArrayList(); - queryTerms.add("urn:gem#pred"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.predicate.name(), queryTerms, XMLSchema.ANYURI.stringValue(), auths) - assertEquals(1, query.size()) - assertEquals( - new IndexEntry(index: ProspectorConstants.COUNT, data: "urn:gem#pred", dataType: XMLSchema.ANYURI.stringValue(), - tripleValueType: TripleValueType.predicate, visibility: "", count: 4l, timestamp: plist.get(0)), - query.get(0)) - - queryTerms = new ArrayList(); - queryTerms.add("mydata1"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.object.name(), queryTerms, XMLSchema.STRING.stringValue(), auths) - assertEquals(1, query.size()) -// assertEquals( -// new IndexEntry(index: ProspectorConstants.COUNT, data: "mydata1", dataType: XMLSchema.STRING.stringValue(), -// tripleValueType: TripleValueType.object, visibility: "", count: -1, timestamp: plist.get(0)), -// query.get(0)) - - queryTerms = new ArrayList(); - queryTerms.add("urn:gem:etype#1234"); - queryTerms.add("urn:gem#pred"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.subjectpredicate.name(), queryTerms, XMLSchema.STRING.stringValue(), auths) - assertEquals(1, query.size()) -// assertEquals( -// new IndexEntry(index: ProspectorConstants.COUNT, data: "urn:gem:etype#1234" + "\u0000" + "urn:gem#pred", dataType: XMLSchema.STRING.stringValue(), -// tripleValueType: TripleValueType.subjectpredicate, visibility: "", count: -1, timestamp: plist.get(0)), -// query.get(0)) - - queryTerms = new ArrayList(); - queryTerms.add("urn:gem#pred"); - queryTerms.add("12"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.predicateobject.name(), queryTerms, XMLSchema.STRING.stringValue(), auths) - assertEquals(1, query.size()) -// assertEquals( -// new IndexEntry(index: ProspectorConstants.COUNT, data: "urn:gem#pred" + "\u0000" + "12", dataType: XMLSchema.STRING.stringValue(), -// tripleValueType: TripleValueType.predicateobject, visibility: "", count: -1, timestamp: plist.get(0)), -// query.get(0)) - - queryTerms = new ArrayList(); - queryTerms.add("urn:gem:etype#1234"); - queryTerms.add("mydata1"); - query = service.query(plist, ProspectorConstants.COUNT, TripleValueType.subjectobject.name(), queryTerms, XMLSchema.STRING.stringValue(), auths) - - assertEquals(1, query.size()) -// assertEquals( -// new IndexEntry(index: ProspectorConstants.COUNT, data: "urn:gem:etype#1234" + "\u0000" + "mydata1", dataType: XMLSchema.STRING.stringValue(), -// tripleValueType: TripleValueType.subjectobject, visibility: "", count: -1, timestamp: plist.get(0)), -// query.get(0)) - - //should be in a teardown method - connector.tableOperations().delete(outtable) - } - - private void debugTable(def connector, String table) { - connector.createScanner(table, new Authorizations((String[]) ["U", "FOUO"])).iterator().each { - println it - } - } -} diff --git 
a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy deleted file mode 100644 index 5bbbee827..000000000 --- a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.prospector.service - -import com.google.common.collect.Iterators -import mvm.rya.accumulo.AccumuloRdfConfiguration -import mvm.rya.accumulo.AccumuloRyaDAO -import mvm.rya.api.domain.RyaStatement -import mvm.rya.api.domain.RyaType -import mvm.rya.api.domain.RyaURI -import mvm.rya.api.persist.RdfEvalStatsDAO -import mvm.rya.prospector.mr.Prospector -import org.apache.accumulo.core.client.Instance -import org.apache.accumulo.core.client.mock.MockInstance -import org.apache.accumulo.core.security.Authorizations -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.fs.Path -import org.apache.hadoop.util.ToolRunner -import org.junit.Test -import org.openrdf.model.impl.URIImpl -import org.openrdf.model.vocabulary.XMLSchema - -import static org.junit.Assert.assertEquals -import org.openrdf.model.impl.LiteralImpl -import org.openrdf.model.Value - -/** - * Date: 1/26/13 - * Time: 3:00 PM - */ -class ProspectorServiceEvalStatsDAOTest { - - @Test - public void testCount() throws Exception { - - Instance mock = new MockInstance("accumulo"); - - def connector = mock.getConnector("user", "pass".bytes) - def intable = "rya_spo" - def outtable = "rya_prospects" - if (connector.tableOperations().exists(outtable)) - connector.tableOperations().delete(outtable) - connector.tableOperations().create(outtable) - - AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - ryaDAO.init() - - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata1"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata2"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred"), new RyaType(XMLSchema.INTEGER, "12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred1"), new RyaType("12"))) - - def confFile = "stats_cluster_config.xml" - def confPath = new Path(getClass().getClassLoader().getResource(confFile).toString()) - def args = (String[]) [confPath]; - ToolRunner.run(new Prospector(), args); - debugTable(connector, outtable) - - def scanner = 
connector.createScanner(outtable, new Authorizations("U", "FOUO")) - def iter = scanner.iterator() -// assertEquals(11, Iterators.size(iter)) - - ryaDAO.destroy() - - def conf = new Configuration() - conf.addResource(confPath) -// debugTable(connector, outtable) - - def rdfConf = new AccumuloRdfConfiguration(conf) - rdfConf.setAuths("U","FOUO") - def evalDao = new ProspectorServiceEvalStatsDAO(connector, rdfConf) - evalDao.init() - - List values = new ArrayList(); - values.add( new URIImpl("urn:gem#pred")); - - def count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE, values) - assertEquals(4.0, count, 0.001); - - values = new ArrayList(); - values.add( new LiteralImpl("mydata1")); - - count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values); - assertEquals(1.0, count, 0.001); - - values = new ArrayList(); - values.add( new LiteralImpl("mydata3")); - - count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values); - assertEquals(-1.0, count, 0.001); - - //should be in a teardown method - connector.tableOperations().delete(outtable) - } - - @Test - public void testNoAuthsCount() throws Exception { - - Instance mock = new MockInstance("accumulo"); - def connector = mock.getConnector("user", "pass".bytes) - def intable = "rya_spo" - def outtable = "rya_prospects" - if (connector.tableOperations().exists(outtable)) - connector.tableOperations().delete(outtable) - connector.tableOperations().create(outtable) - connector.securityOperations().createUser("user", "pass".bytes, new Authorizations("U", "FOUO")) - - AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - ryaDAO.init() - - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata1"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata2"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred"), new RyaType(XMLSchema.INTEGER, "12"))) - ryaDAO.add(new RyaStatement(new RyaURI("urn:gem:etype#1235"), new RyaURI("urn:gem#pred1"), new RyaType("12"))) - - def confFile = "stats_cluster_config.xml" - def confPath = new Path(getClass().getClassLoader().getResource(confFile).toString()) - def args = (String[]) [confPath]; - ToolRunner.run(new Prospector(), args); - - def scanner = connector.createScanner(outtable, new Authorizations("U", "FOUO")) - def iter = scanner.iterator() -// assertEquals(11, Iterators.size(iter)) - - ryaDAO.destroy() - - def conf = new Configuration() - conf.addResource(confPath) - - def rdfConf = new AccumuloRdfConfiguration(conf) -// rdfConf.setAuths("U","FOUO") - def evalDao = new ProspectorServiceEvalStatsDAO(connector, rdfConf) - evalDao.init() - - - List values = new ArrayList(); - values.add( new URIImpl("urn:gem#pred")); - def count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE, values) - assertEquals(4.0, count, 0.001); - - values = new ArrayList(); - values.add( new LiteralImpl("mydata1")); - count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values); - assertEquals(1.0, count, 0.001); - - values = new ArrayList(); - values.add( new LiteralImpl("mydata3")); - - count = evalDao.getCardinality(rdfConf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values); - assertEquals(-1.0, count, 0.001); - - //should 
be in a teardown method - connector.tableOperations().delete(outtable) - } - - private void debugTable(def connector, String table) { - connector.createScanner(table, new Authorizations((String[]) ["U", "FOUO"])).iterator().each { - println it - } - } -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java deleted file mode 100644 index f40b63fc3..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java +++ /dev/null @@ -1,592 +0,0 @@ -package mvm.rya.joinselect; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.math.BigDecimal; -import java.math.MathContext; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.admin.TableOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.collect.Lists; - -public class AccumuloSelectivityEvalDAOTest { - - private static final String DELIM = "\u0000"; - private final 
byte[] EMPTY_BYTE = new byte[0];
-  private final Value EMPTY_VAL = new Value(EMPTY_BYTE);
-
-  private String q1 = ""//
-      + "SELECT ?h " //
-      + "{" //
-      + " ?h <uri:howlsAt> <uri:moon> . "//
-      + " ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + " ?h <uri:barksAt> <uri:cat> ."//
-      + " ?h <uri:peesOn> <uri:hydrant> . "//
-      + "}";//
-
-  private String q2 = ""//
-      + "SELECT ?h " //
-      + "{" //
-      + " <uri:howlsAt> ?h <uri:moon> . "//
-      + " <http://www.w3.org/2000/01/rdf-schema#label> ?h <uri:dog> ."//
-      + " <uri:barksAt> ?h <uri:cat> ."//
-      + " <uri:peesOn> ?h <uri:hydrant> . "//
-      + "}";//
-
-  private String q3 = ""//
-      + "SELECT ?h " //
-      + "{" //
-      + " <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ?h. "//
-      + " <uri:barksAt> ?h <uri:cat> ."//
-      + " ?h <uri:peesOn> <uri:hydrant> ."//
-      + " <uri:howlsAt> <uri:moon> ?h . "//
-      + "}";//
-
-  private Connector conn;
-  AccumuloRdfConfiguration arc;
-  RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res;
-  BatchWriterConfig config;
-  Instance mock;
-
-  @Before
-  public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
-
-    mock = new MockInstance("accumulo");
-    PasswordToken pToken = new PasswordToken("pass".getBytes());
-    conn = mock.getConnector("user", pToken);
-
-    config = new BatchWriterConfig();
-    config.setMaxMemory(1000);
-    config.setMaxLatency(1000, TimeUnit.SECONDS);
-    config.setMaxWriteThreads(10);
-
-    if (conn.tableOperations().exists("rya_prospects")) {
-      conn.tableOperations().delete("rya_prospects");
-    }
-    if (conn.tableOperations().exists("rya_selectivity")) {
-      conn.tableOperations().delete("rya_selectivity");
-    }
-
-    arc = new AccumuloRdfConfiguration();
-    res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
-    arc.setMaxRangesForScanner(300);
-
-  }
-
-  @Test
-  public void testInitialize() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
-
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    TableOperations tos = conn.tableOperations();
-    Assert.assertTrue(tos.exists("rya_prospects") && tos.exists("rya_selectivity"));
-    Assert.assertTrue(accc.isInitialized());
-    Assert.assertTrue(accc.getConf().equals(arc));
-    Assert.assertTrue(accc.getConnector().equals(conn));
-    Assert.assertTrue(accc.getRdfEvalDAO().equals(res));
-
-  }
-
-  @Test
-  public void testCardinalityQuery1() throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException,
-      MalformedQueryException {
-
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setRdfEvalDAO(res);
-    accc.setConnector(conn);
-    accc.init();
-
-    BatchWriter bw = conn.createBatchWriter("rya_prospects", config);
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_selectivity", config);
-    Mutation m = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m.put(new Text("FullTableCardinality"), new Text("600"), EMPTY_VAL);
-    List list = Lists.newArrayList();
-    list.add(m);
-    bw1.addMutations(list);
-    bw1.close();
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    List mList = new ArrayList();
-    Mutation m1, m2, m3;
-
-    Integer tempInt;
-    Integer tempInt2;
-
-    for (int i = 1; i < 7; i++) {
-      tempInt = 5 * i;
-      tempInt2 = 10 - i;
-      m1 = new Mutation(s1 + DELIM + i);
-      m1.put(new Text("count"), new Text(""), new Value((tempInt.toString()).getBytes()));
-      m2 = new Mutation(s2 + DELIM + (7 - i));
-      m2.put(new Text("count"), new Text(""), new Value((tempInt.toString()).getBytes()));
-      m3 = new 
Mutation(s3 + DELIM + (10 + i)); - m3.put(new Text("count"), new Text(""), new Value((tempInt2.toString()).getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - } - - bw.addMutations(mList); - bw.close(); - - List spList = getSpList(q1); - long c1 = accc.getCardinality(arc, spList.get(0)); - long c2 = accc.getCardinality(arc, spList.get(1)); - long c3 = accc.getCardinality(arc, spList.get(2)); - long c4 = accc.getCardinality(arc, spList.get(3)); - - Assert.assertTrue(c1 == (long) 0); - Assert.assertTrue(c2 == (long) 5); - Assert.assertTrue(c3 == (long) 30); - Assert.assertTrue(c4 == (long) 9); - - } - - @Test - public void testCardinalityQuery2() throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException, - MalformedQueryException { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw = conn.createBatchWriter("rya_prospects", config); - - BatchWriter bw1 = conn.createBatchWriter("rya_selectivity", config); - Mutation m = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m.put(new Text("FullTableCardinality"), new Text("600"), EMPTY_VAL); - List list = Lists.newArrayList(); - list.add(m); - bw1.addMutations(list); - bw1.close(); - - String s1 = "subjectobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "subjectobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "subjectobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List mList = new ArrayList(); - Mutation m1, m2, m3; - - Integer tempInt; - Integer tempInt2; - - for (int i = 1; i < 7; i++) { - tempInt = 5 * i; - tempInt2 = 10 - i; - m1 = new Mutation(s1 + DELIM + i); - m1.put(new Text("count"), new Text(""), new Value((tempInt.toString()).getBytes())); - m2 = new Mutation(s2 + DELIM + (7 - i)); - m2.put(new Text("count"), new Text(""), new Value((tempInt.toString()).getBytes())); - m3 = new Mutation(s3 + DELIM + (10 + i)); - m3.put(new Text("count"), new Text(""), new Value((tempInt2.toString()).getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - } - bw.addMutations(mList); - bw.close(); - - List spList = getSpList(q2); - long c1 = accc.getCardinality(arc, spList.get(0)); - long c2 = accc.getCardinality(arc, spList.get(1)); - long c3 = accc.getCardinality(arc, spList.get(2)); - long c4 = accc.getCardinality(arc, spList.get(3)); - - Assert.assertTrue(c1 == (long) 0); - Assert.assertTrue(c2 == (long) 5); - Assert.assertTrue(c3 == (long) 30); - Assert.assertTrue(c4 == (long) 9); - - } - - @Test - public void testJoinCardinalityQuery1() throws AccumuloException, AccumuloSecurityException, TableExistsException, - TableNotFoundException, MalformedQueryException { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", 
"subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", - "predicatesubject"); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("20".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("15".getBytes())); - m3 = new Mutation(s3 + DELIM + "3"); - m3.put(new Text("count"), new Text(""), new Value("10".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - int i = 30; - int j = 60; - int k = 90; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("600"), EMPTY_VAL); - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - - Scanner scan = conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); - - } - - List spList = getSpList(q1); - System.out.println(spList); - List jCardList = new ArrayList(); - - for (StatementPattern sp1 : spList) { - for (StatementPattern sp2 : spList) { - jCardList.add(accc.getJoinSelect(arc, sp1, sp2)); - } - } - - System.out.println("Join cardinalities are " + jCardList); - - Assert.assertEquals(0, jCardList.get(0), .001); - Assert.assertEquals(0, jCardList.get(3), .001); - Assert.assertEquals(6.0 / 600, jCardList.get(5), .001); - Assert.assertEquals(6.0 / 600, jCardList.get(6), .001); - Assert.assertEquals(0 / 600, jCardList.get(8), .001); - Assert.assertEquals(6.0 / 600, jCardList.get(7), .001); - Assert.assertEquals(15.0 / 600, jCardList.get(11), .001); - Assert.assertEquals(6.0 / 600, jCardList.get(13), .001); - Assert.assertEquals(10.0 / 600, jCardList.get(15), .001); - - Assert.assertTrue(jCardList.get(0) == 0); - Assert.assertTrue(jCardList.get(3) == 0); - Assert.assertTrue(jCardList.get(5) == .01); - Assert.assertTrue(jCardList.get(6) == .01); - Assert.assertTrue(jCardList.get(8) == 0); - Assert.assertTrue(jCardList.get(7) == (6.0 / 600)); - Assert.assertTrue(jCardList.get(11) == (1.0 / 40)); - Assert.assertTrue(jCardList.get(13) == .01); - Assert.assertTrue(jCardList.get(15) == (10.0 / 600)); - - } - - @Test - public void testJoinCardinalityQuery2() throws AccumuloException, AccumuloSecurityException, TableExistsException, - TableNotFoundException, MalformedQueryException { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "subjectobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - 
String s2 = "subjectobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "subjectobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s4 = "objectsubject" + DELIM + "uri:dog" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label"; - String s5 = "objectsubject" + DELIM + "uri:cat" + DELIM + "uri:barksAt"; - String s6 = "objectsubject" + DELIM + "uri:hydrant" + DELIM + "uri:peesOn"; - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", - "predicatesubject"); - List mList = new ArrayList(); - List mList2 = new ArrayList(); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("4".getBytes())); - m3 = new Mutation(s3 + DELIM + "3"); - m3.put(new Text("count"), new Text(""), new Value("6".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - - m1 = new Mutation(s4); - m2 = new Mutation(s5); - m3 = new Mutation(s6); - int i = 5; - int j = 6; - int k = 7; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("600"), EMPTY_VAL); - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - - List spList = getSpList(q2); - // System.out.println(spList); - List jCardList = new ArrayList(); - - for (StatementPattern sp1 : spList) { - for (StatementPattern sp2 : spList) { - jCardList.add(accc.getJoinSelect(arc, sp1, sp2)); - } - } - - System.out.println("Join cardinalities are " + jCardList); - - Assert.assertEquals(0, jCardList.get(0), .001); - Assert.assertEquals(0, jCardList.get(3), .001); - Assert.assertEquals(2.0 / 600, jCardList.get(5), .001); - Assert.assertEquals(4.0 / 600, jCardList.get(6), .001); - Assert.assertEquals(.0 / 600, jCardList.get(8), .001); - Assert.assertEquals(6. / 600, jCardList.get(7), .001); - Assert.assertEquals(6. 
/ 600, jCardList.get(11), .001); - - Assert.assertTrue(jCardList.get(0) == 0); - Assert.assertTrue(jCardList.get(3) == 0); - Assert.assertTrue(jCardList.get(5) == (1.0 / 300)); - Assert.assertTrue(jCardList.get(6) == (4.0 / 600)); - Assert.assertTrue(jCardList.get(8) == 0); - Assert.assertTrue(jCardList.get(7) == .01); - Assert.assertTrue(jCardList.get(11) == .01); - - } - - @Test - public void testJoinCardinalityQuery3() throws AccumuloException, AccumuloSecurityException, TableExistsException, - TableNotFoundException, MalformedQueryException { - - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "subjectpredicate" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "subjectobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s4 = "subjectpredicate" + DELIM + "uri:howlsAt" + DELIM + "uri:moon"; - String s5 = "objectsubject" + DELIM + "uri:cat" + DELIM + "uri:barksAt"; - - List sList = Arrays.asList("subjectobject", "objectsubject", "objectobject", "objectpredicate", "subjectpredicate", - "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - List mList = new ArrayList(); - List mList2 = new ArrayList(); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("15".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("11".getBytes())); - m3 = new Mutation(s3 + DELIM + "3"); - m3.put(new Text("count"), new Text(""), new Value("13".getBytes())); - m4 = new Mutation(s4 + DELIM + "8"); - m4.put(new Text("count"), new Text(""), new Value("20".getBytes())); - m5 = new Mutation(s4 + DELIM + "2"); - m5.put(new Text("count"), new Text(""), new Value("10".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - m1 = new Mutation(s1); - m2 = new Mutation(s5); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - int i = 5; - int j = 6; - int k = 7; - int l = 8; - Long count1; - Long count2; - Long count3; - Long count4; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - count4 = (long) l; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - l = 2 * l; - } - m5 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m5.put(new Text("FullTableCardinality"), new Text("600"), EMPTY_VAL); - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - mList2.add(m5); - bw2.addMutations(mList2); - bw2.close(); - - List spList = getSpList(q3); - System.out.println(spList); - List jCardList = new ArrayList(); - - for (StatementPattern sp1 : spList) { - for (StatementPattern sp2 : spList) { - jCardList.add(accc.getJoinSelect(arc, sp1, sp2)); - } - } - - MathContext mc = new MathContext(3); - - Assert.assertEquals(3.2 / 600, jCardList.get(0), .001); - 
Assert.assertEquals(0.5384615384615384 / 600, jCardList.get(3), .001); - Assert.assertEquals(1.3333333333333333 / 600, jCardList.get(5), .001); - Assert.assertEquals(2.6666666666666665 / 600, jCardList.get(6), .001); - Assert.assertEquals(6.4 / 600, jCardList.get(8), .001); - Assert.assertEquals(13. / 600, jCardList.get(15), .001); - - Assert.assertTrue(new BigDecimal(jCardList.get(2)).round(mc).equals(new BigDecimal(64.0 / 6000).round(mc))); - Assert.assertTrue(new BigDecimal(jCardList.get(7)).round(mc).equals(new BigDecimal(7.0 / 7800).round(mc))); - Assert.assertTrue(new BigDecimal(jCardList.get(14)).round(mc).equals(new BigDecimal(112.0 / 7800).round(mc))); - - } - - private List getSpList(String query) throws MalformedQueryException { - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(query, null); - TupleExpr te = pq.getTupleExpr(); - - return StatementPatternCollector.process(te); - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java deleted file mode 100644 index 4a57f312d..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java +++ /dev/null @@ -1,141 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.ReduceDriver; -import org.junit.Test; - -public class CardinalityIdentityReducerTest { - - private static final String DELIM = "\u0000"; - - @Test - public void testCIReducerOneConstant() throws InterruptedException, IOException { - - TripleEntry te = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("object")); - CardList cL1 = new CardList(1, 2, 3, 0, 0, 0); - CardList cL2 = new CardList(4, 5, 6, 0, 0, 0); - CardList cl = new CardList(5, 7, 9, 0, 0, 0); - List list = new ArrayList(); - list.add(cL1); - list.add(cL2); - - Text row = new Text(te.getFirstPos().toString() + DELIM + te.getFirst().toString()); - Mutation m1 = new Mutation(row); - m1.put(new Text(te.getKeyPos().toString() + "subject"), new Text(cl.getcardS().toString()), new Value(new byte[0])); - Mutation m2 = new Mutation(row); - m2.put(new Text(te.getKeyPos().toString() + "predicate"), new Text(cl.getcardP().toString()), new Value(new byte[0])); - Mutation m3 = new Mutation(row); - m3.put(new Text(te.getKeyPos().toString() + "object"), new Text(cl.getcardO().toString()), new Value(new byte[0])); - Text table = new Text(""); - - new ReduceDriver().withReducer(new JoinSelectStatisticsSum.CardinalityIdentityReducer()).withInput(te, list) - .withOutput(table, m1).withOutput(table, m2).withOutput(table, m3).runTest(); - - } - - @Test - public void testCIReducerTwoConstant() throws InterruptedException, IOException { - - TripleEntry te = new TripleEntry(new Text("urn:gem:etype#1234"), new Text("urn:gem#pred"), new Text("subject"), new Text("predicate"), new Text("object")); - CardList cL1 = new CardList(1, 2, 3, 0, 0, 0); - CardList cL2 = new CardList(4, 5, 6, 0, 0, 0); - CardList cl = new CardList(5, 7, 9, 0, 0, 0); - List list = new ArrayList(); - list.add(cL1); - list.add(cL2); - - Text row = new Text(te.getFirstPos().toString() + te.getSecondPos().toString() + DELIM + te.getFirst().toString() + DELIM + te.getSecond()); - Mutation m1 = new Mutation(row); - m1.put(new Text(te.getKeyPos().toString() + "subject"), new Text(cl.getcardS().toString()), new Value(new byte[0])); - Mutation m2 = new Mutation(row); - m2.put(new Text(te.getKeyPos().toString() + "predicate"), new Text(cl.getcardP().toString()), new Value(new byte[0])); - Mutation m3 = new Mutation(row); - m3.put(new Text(te.getKeyPos().toString() + "object"), new Text(cl.getcardO().toString()), new Value(new byte[0])); - Text table = new Text(""); - - new ReduceDriver().withReducer(new JoinSelectStatisticsSum.CardinalityIdentityReducer()).withInput(te, list) - .withOutput(table, m1).withOutput(table, m2).withOutput(table, m3).runTest(); - - } - - @Test - public void testJoinTwoVars() throws InterruptedException, IOException { - - TripleEntry te = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("predicateobject")); - CardList cL1 = new CardList(0, 0, 0, 1, 2, 3); - CardList cL2 = new CardList(0, 0, 0, 4, 5, 6); - CardList cl = new CardList(0, 0, 0, 5, 7, 9); - List list = new ArrayList(); - list.add(cL1); - list.add(cL2); - - Text row = new Text(te.getFirstPos().toString() + DELIM + 
te.getFirst().toString()); - Mutation m1 = new Mutation(row); - m1.put(new Text(te.getKeyPos().toString() + "subjectpredicate"), new Text(cl.getcardSP().toString()), new Value(new byte[0])); - Mutation m2 = new Mutation(row); - m2.put(new Text(te.getKeyPos().toString() + "predicateobject"), new Text(cl.getcardPO().toString()), new Value(new byte[0])); - Mutation m3 = new Mutation(row); - m3.put(new Text(te.getKeyPos().toString() + "objectsubject"), new Text(cl.getcardSO().toString()), new Value(new byte[0])); - Text table = new Text(""); - - new ReduceDriver().withReducer(new JoinSelectStatisticsSum.CardinalityIdentityReducer()).withInput(te, list) - .withOutput(table, m1).withOutput(table, m2).withOutput(table, m3).runTest(); - - } - - @Test - public void testJoinTwoVarsReverseOrder() throws InterruptedException, IOException { - - TripleEntry te = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("objectpredicate")); - CardList cL1 = new CardList(0, 0, 0, 1, 2, 3); - CardList cL2 = new CardList(0, 0, 0, 4, 5, 6); - CardList cl = new CardList(0, 0, 0, 5, 7, 9); - List list = new ArrayList(); - list.add(cL1); - list.add(cL2); - - Text row = new Text(te.getFirstPos().toString() + DELIM + te.getFirst().toString()); - Mutation m1 = new Mutation(row); - m1.put(new Text("predicateobject" + "predicatesubject"), new Text(cl.getcardSP().toString()), new Value(new byte[0])); - Mutation m2 = new Mutation(row); - m2.put(new Text("predicateobject" + "objectpredicate"), new Text(cl.getcardPO().toString()), new Value(new byte[0])); - Mutation m3 = new Mutation(row); - m3.put(new Text("predicateobject" + "subjectobject"), new Text(cl.getcardSO().toString()), new Value(new byte[0])); - Text table = new Text(""); - - new ReduceDriver().withReducer(new JoinSelectStatisticsSum.CardinalityIdentityReducer()).withInput(te, list) - .withOutput(table, m1).withOutput(table, m2).withOutput(table, m3).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java deleted file mode 100644 index 38183000f..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java +++ /dev/null @@ -1,76 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.IOException; - -import mvm.rya.joinselect.mr.JoinSelectProspectOutput; -import mvm.rya.joinselect.mr.utils.CardinalityType; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.TripleCard; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.MapDriver; -import org.junit.Test; - -public class CardinalityMapperTest { - - private static final String DELIM = "\u0000"; - - public enum TripleValueType { - subject, predicate, object, subjectpredicate, predicateobject, subjectobject - } - - @Test - public void testOutput() throws InterruptedException, IOException { - - String s = "urn:gem:etype#1234"; - String p = "urn:gem#pred"; - - Text t1 = new Text(TripleValueType.subject.name() + DELIM + s + DELIM + 1); - Text t2 = new Text(TripleValueType.predicate.name() + DELIM + p + DELIM + 2); - Text t3 = new Text(TripleValueType.subjectpredicate.name() + DELIM + s + DELIM + p + DELIM + 3); - - byte[] b = new byte[0]; - byte[] c = "25".getBytes(); - byte[] d = "47".getBytes(); - byte[] e = "15".getBytes(); - - Key key1 = new Key(t1.getBytes(), b, b, b, 1); - Key key2 = new Key(t2.getBytes(), b, b, b, 1); - Key key3 = new Key(t3.getBytes(), b, b, b, 1); - Value val1 = new Value(c); - Value val2 = new Value(d); - Value val3 = new Value(e); - - // System.out.println("Keys are " + key1 + " and " + key2); - - new MapDriver().withMapper(new JoinSelectProspectOutput.CardinalityMapper()).withInput(key1, val1) - .withInput(key2, val2).withInput(key3, val3).withOutput(new CompositeType(s, 1), new TripleCard(new CardinalityType(25, "subject", 1))) - .withOutput(new CompositeType(p, 1), new TripleCard(new CardinalityType(47, "predicate", 2))) - .withOutput(new CompositeType(s + DELIM + p, 1), new TripleCard(new CardinalityType(15, "subjectpredicate", 3))).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java deleted file mode 100644 index 705edb13f..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.IOException; - -import mvm.rya.joinselect.mr.FullTableSize; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver; -import org.junit.Test; - -//TODO fix table names! - -public class FullTableSizeTest { - - private static final String DELIM = "\u0000"; - - @Test - public void testFullTableSize() throws IOException { - - Value value = new Value(new byte[0]); - - Mutation m = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m.put(new Text("FullTableCardinality"), new Text("15"), new Value(new byte[0])); - - new MapReduceDriver() - .withMapper(new FullTableSize.FullTableMapper()).withInput(new Key(new Text("entry1")), value) - .withInput(new Key(new Text("entry2")), value).withInput(new Key(new Text("entry3")), value) - .withInput(new Key(new Text("entry4")), value).withInput(new Key(new Text("entry5")), value) - .withInput(new Key(new Text("entry6")), value).withInput(new Key(new Text("entry7")), value) - .withInput(new Key(new Text("entry8")), value).withInput(new Key(new Text("entry9")), value) - .withInput(new Key(new Text("entry10")), value).withInput(new Key(new Text("entry11")), value) - .withInput(new Key(new Text("entry12")), value).withInput(new Key(new Text("entry13")), value) - .withInput(new Key(new Text("entry14")), value).withInput(new Key(new Text("entry15")), value) - .withCombiner(new FullTableSize.FullTableCombiner()).withReducer(new FullTableSize.FullTableReducer()) - .withOutput(new Text(""), m).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java deleted file mode 100644 index be03565de..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java +++ /dev/null @@ -1,124 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import mvm.rya.joinselect.mr.JoinSelectAggregate; -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.CardinalityType; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.TripleCard; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.hadoop.mrunit.mapreduce.ReduceDriver; -import org.junit.Test; - -public class JoinReducerTest { - - private static final String DELIM = "\u0000"; - - @Test - public void testSingleConstCard() throws InterruptedException, IOException { - - CompositeType ct = new CompositeType("urn:gem:etype#1234", 1); - TripleEntry te = new TripleEntry("urn:gem#pred", "urn:gem:etype#4567", "predicate", "object", "subject"); - CardinalityType c5 = new CardinalityType(45, "object", 0); - CardinalityType c1 = new CardinalityType(25, "subject", 2); - CardinalityType c2 = new CardinalityType(27, "predicate", 2); - CardinalityType c3 = new CardinalityType(29, "object", 2); - CardinalityType c4 = new CardinalityType(31, "predicate", 1); - List list = new ArrayList(); - list.add(new TripleCard(c1)); - list.add(new TripleCard(c2)); - list.add(new TripleCard(c3)); - list.add(new TripleCard(c4)); - list.add(new TripleCard(c5)); - list.add(new TripleCard(te)); - System.out.println("List is " + list); - - new ReduceDriver().withReducer(new JoinSelectAggregate.JoinReducer()).withInput(ct, list) - .withOutput(te, new CardList(25, 31, 45, 0, 0, 0)).runTest(); - - } - - @Test - public void testTwoTripleEntry() throws InterruptedException, IOException { - - CompositeType ct = new CompositeType("urn:gem:etype#1234", 1); - TripleEntry te1 = new TripleEntry("urn:gem#pred", "urn:gem:etype#4567", "predicate", "object", "subject"); - TripleEntry te2 = new TripleEntry("urn:gem#8910", "urn:gem:etype#4567", "subject", "predicate", "object"); - CardinalityType c5 = new CardinalityType(45, "object", 0); - CardinalityType c1 = new CardinalityType(25, "subject", 2); - CardinalityType c2 = new CardinalityType(27, "predicate", 2); - CardinalityType c3 = new CardinalityType(29, "object", 2); - CardinalityType c4 = new CardinalityType(31, "predicate", 1); - List list = new ArrayList(); - list.add(new TripleCard(c1)); - list.add(new TripleCard(c2)); - list.add(new TripleCard(c3)); - list.add(new TripleCard(c4)); - list.add(new TripleCard(c5)); - list.add(new TripleCard(te1)); - list.add(new TripleCard(te2)); - System.out.println("List is " + list); - - new ReduceDriver().withReducer(new JoinSelectAggregate.JoinReducer()).withInput(ct, list) - .withOutput(te1, new CardList(25, 31, 45, 0, 0, 0)).withOutput(te2, new CardList(25, 31, 45, 0, 0, 0)).runTest(); - - } - - @Test - public void testTwoConstCard() throws InterruptedException, IOException { - - CompositeType ct1 = new CompositeType("urn:gem#pred" + DELIM + "urn:gem:etype#1234", 1); - TripleEntry te1 = new TripleEntry("uri:testSubject", "", "subject", "", "predicateobject"); - TripleEntry te2 = new TripleEntry("uri:testSubject", "", "subject", "", "objectpredicate"); - - CardinalityType c5 = new CardinalityType(45, "subjectobject", 0); - CardinalityType c1 = new CardinalityType(25, "subjectobject", 2); - CardinalityType c2 = new CardinalityType(27, "predicateobject", 5); - CardinalityType c3 = new CardinalityType(29, "predicateobject", 2); - CardinalityType c4 = new CardinalityType(31, "subjectpredicate", 1); - CardinalityType c6 = new CardinalityType(56, "subjectpredicate", 2); - - 
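// Inferred from the expected CardList below: JoinReducer appears to keep,
// for each join type, the cardinality entry with the smallest timestamp
// (the third CardinalityType argument), so subjectpredicate resolves to
// c4 (31, ts 1) over c6 (56, ts 2), predicateobject to c3 (29, ts 2) over
// c2 (27, ts 5), and subjectobject to c5 (45, ts 0) over c1 (25, ts 2),
// giving CardList(0, 0, 0, 31, 29, 45).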
List list1 = new ArrayList(); - - list1.add(new TripleCard(c1)); - list1.add(new TripleCard(c2)); - list1.add(new TripleCard(c3)); - list1.add(new TripleCard(c4)); - list1.add(new TripleCard(c5)); - list1.add(new TripleCard(c6)); - list1.add(new TripleCard(te1)); - list1.add(new TripleCard(te2)); - - // System.out.println("List is " + list); - - new ReduceDriver().withReducer(new JoinSelectAggregate.JoinReducer()).withInput(ct1, list1) - .withOutput(te1, new CardList(0, 0, 0, 31, 29, 45)).withOutput(te2, new CardList(0, 0, 0, 31, 29, 45)).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java deleted file mode 100644 index 0d53b90f2..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java +++ /dev/null @@ -1,94 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.Map; - -import mvm.rya.joinselect.mr.JoinSelectSpoTableOutput; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.TripleCard; -import mvm.rya.joinselect.mr.utils.TripleEntry; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolver; -import mvm.rya.api.resolver.triple.TripleRowResolverException; -import mvm.rya.api.resolver.triple.impl.WholeRowTripleResolver; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.MapDriver; -import org.junit.Test; - -public class JoinSelectMapperTest { - - private static final String DELIM = "\u0000"; - - @Test - public void testOutput() throws TripleRowResolverException, IOException { - - RyaStatement rya = new RyaStatement(new RyaURI("urn:gem:etype#1234"), new RyaURI("urn:gem#pred"), new RyaType("mydata1")); - Text s = new Text(rya.getSubject().getData()); - Text p = new Text(rya.getPredicate().getData()); - Text o = new Text(rya.getObject().getData()); - Text sp = new Text(rya.getSubject().getData() + DELIM + rya.getPredicate().getData()); - Text so = new Text(rya.getSubject().getData() + DELIM + rya.getObject().getData()); - Text po = new Text(rya.getPredicate().getData() + DELIM + rya.getObject().getData()); - Text ps = new Text(rya.getPredicate().getData() + DELIM + rya.getSubject().getData()); - Text op = new Text(rya.getObject().getData() + DELIM + 
rya.getPredicate().getData()); - Text os = new Text(rya.getObject().getData() + DELIM + rya.getSubject().getData()); - - TripleEntry t1 = new TripleEntry(s, p, new Text("subject"), new Text("predicate"), new Text("object")); - TripleEntry t2 = new TripleEntry(p, o, new Text("predicate"), new Text("object"), new Text("subject")); - TripleEntry t3 = new TripleEntry(o, s, new Text("object"), new Text("subject"), new Text("predicate")); - TripleEntry t4 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("subjectpredicate")); - TripleEntry t5 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("objectsubject")); - TripleEntry t6 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("predicateobject")); - TripleEntry t7 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("objectpredicate")); - TripleEntry t8 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("subjectobject")); - TripleEntry t9 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("predicatesubject")); - - TripleRowResolver trr = new WholeRowTripleResolver(); - Map map = trr.serialize(rya); - System.out.println(map); - TripleRow tr = map.get(TABLE_LAYOUT.SPO); - System.out.println("Triple row is" + tr); - System.out.println("ColumnV is " + tr.getTimestamp()); - byte[] b = new byte[0]; - Key key = new Key(tr.getRow(), tr.getColumnFamily(), tr.getColumnQualifier(), b, 1); - Value val = new Value(b); - - new MapDriver().withMapper(new JoinSelectSpoTableOutput.JoinSelectMapper()).withInput(key, val) - .withOutput(new CompositeType(o, new IntWritable(2)), new TripleCard(t1)).withOutput(new CompositeType(s, new IntWritable(2)), new TripleCard(t2)) - .withOutput(new CompositeType(p, new IntWritable(2)), new TripleCard(t3)).withOutput(new CompositeType(po, new IntWritable(2)), new TripleCard(t6)) - .withOutput(new CompositeType(so, new IntWritable(2)), new TripleCard(t5)).withOutput(new CompositeType(sp, new IntWritable(2)), new TripleCard(t4)) - .withOutput(new CompositeType(op, new IntWritable(2)), new TripleCard(t7)).withOutput(new CompositeType(os, new IntWritable(2)), new TripleCard(t8)) - .withOutput(new CompositeType(ps, new IntWritable(2)), new TripleCard(t9)).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java deleted file mode 100644 index 19c90a3e4..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java +++ /dev/null @@ -1,89 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.IOException; - -import mvm.rya.joinselect.mr.JoinSelectProspectOutput; -import mvm.rya.joinselect.mr.utils.CardinalityType; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.TripleCard; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.MapDriver; -import org.junit.Test; - -public class JoinSelectProspectOutputTest { - - private static final String DELIM = "\u0000"; - - public enum TripleValueType { - subject, predicate, object, subjectpredicate, predicateobject, subjectobject - } - - @Test - public void testOutput() throws InterruptedException, IOException { - - String s = "urn:gem:etype#1234"; - String p = "urn:gem#pred"; - - String ts = "798497748386999999"; - - Text t1 = new Text(TripleValueType.subject.name() + DELIM + s + DELIM + 1); - Text t2 = new Text(TripleValueType.predicate.name() + DELIM + p + DELIM + 2); - Text t3 = new Text(TripleValueType.subjectpredicate.name() + DELIM + s + DELIM + p + DELIM + ts); - - byte[] b = new byte[0]; - byte[] c = "25".getBytes(); - byte[] d = "47".getBytes(); - byte[] e = "15".getBytes(); - - Key key1 = new Key(t1.getBytes(), b, b, b, 1); - Key key2 = new Key(t2.getBytes(), b, b, b, 1); - Key key3 = new Key(t3.getBytes(), b, b, b, 1); - Value val1 = new Value(c); - Value val2 = new Value(d); - Value val3 = new Value(e); - - // System.out.println("Keys are " + key1 + " and " + key2); - - new MapDriver() - .withMapper(new JoinSelectProspectOutput.CardinalityMapper()) - .withInput(key1, val1) - .withInput(key2, val2) - .withInput(key3, val3) - .withOutput(new CompositeType(s, 1), new TripleCard(new CardinalityType(25, "subject", 1))) - .withOutput(new CompositeType(p, 1), new TripleCard(new CardinalityType(47, "predicate", 2))) - .withOutput(new CompositeType(s + DELIM + p, 1), - new TripleCard(new CardinalityType(15, "subjectpredicate", Long.parseLong(ts)))).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java deleted file mode 100644 index 98236d3e7..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java +++ /dev/null @@ -1,60 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.IOException; - -import mvm.rya.joinselect.mr.JoinSelectStatisticsSum; -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mrunit.mapreduce.MapDriver; -import org.junit.Test; - -public class JoinSelectStatisticsSumTest { - - @Test - public void testFullTripleEntry() throws InterruptedException, IOException { - - TripleEntry te1 = new TripleEntry(new Text("urn:gem:etype#1234"), new Text("urn:gem#pred"), new Text("subject"), new Text("predicate"), new Text("object")); - CardList cl = new CardList(34, 52, 63, 0, 0, 0); - TripleEntry te2 = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("object")); - TripleEntry te3 = new TripleEntry(new Text("urn:gem#pred"), new Text(""), new Text("predicate"), new Text(""), new Text("object")); - - new MapDriver().withMapper(new JoinSelectStatisticsSum.CardinalityIdentityMapper()).withInput(te1, cl) - .withOutput(te2, cl).withOutput(te3, cl).withOutput(te1, cl).runTest(); - - } - - @Test - public void testPartialTripleEntry() throws InterruptedException, IOException { - - TripleEntry te1 = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("object")); - CardList cl = new CardList(34, 52, 63, 0, 0, 0); - - new MapDriver().withMapper(new JoinSelectStatisticsSum.CardinalityIdentityMapper()).withInput(te1, cl) - .withOutput(te1, cl).runTest(); - - } - -} diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java deleted file mode 100644 index 7061a2ce0..000000000 --- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java +++ /dev/null @@ -1,872 +0,0 @@ -package mvm.rya.joinselect.mr; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INSTANCE; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PASSWORD; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_TABLE; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SELECTIVITY_TABLE; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_OUTPUTPATH; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_TABLE; -import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.USERNAME; - -import java.io.File; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.joinselect.mr.JoinSelectAggregate.JoinReducer; -import mvm.rya.joinselect.mr.JoinSelectAggregate.JoinSelectAggregateMapper; -import mvm.rya.joinselect.mr.JoinSelectAggregate.JoinSelectGroupComparator; -import mvm.rya.joinselect.mr.JoinSelectAggregate.JoinSelectPartitioner; -import mvm.rya.joinselect.mr.JoinSelectAggregate.JoinSelectSortComparator; -import mvm.rya.joinselect.mr.JoinSelectProspectOutput.CardinalityMapper; -import mvm.rya.joinselect.mr.JoinSelectSpoTableOutput.JoinSelectMapper; -import mvm.rya.joinselect.mr.JoinSelectStatisticsSum.CardinalityIdentityCombiner; -import mvm.rya.joinselect.mr.JoinSelectStatisticsSum.CardinalityIdentityMapper; -import mvm.rya.joinselect.mr.JoinSelectStatisticsSum.CardinalityIdentityReducer; -import mvm.rya.joinselect.mr.utils.CardList; -import mvm.rya.joinselect.mr.utils.CompositeType; -import mvm.rya.joinselect.mr.utils.JoinSelectStatsUtil; -import mvm.rya.joinselect.mr.utils.TripleCard; -import mvm.rya.joinselect.mr.utils.TripleEntry; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; -import 
org.apache.hadoop.mapreduce.lib.input.MultipleInputs; -import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; -import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class JoinSelectStatisticsTest { - - private static final String PREFIX = JoinSelectStatisticsTest.class.getSimpleName(); - - private static final String DELIM = "\u0000"; - private static final String uri = "uri:"; - private List cardList = Arrays.asList("subject", "predicate", "object"); - private List aggCardList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - private static File SPOOUT; - private static File PROSPECTSOUT; - private static File tempDir; - private Connector c; - private RyaTripleContext ryaContext; - private static final String INSTANCE_NAME = "mapreduce_instance"; - - private static class JoinSelectTester1 extends Configured implements Tool { - - - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - - String inTable = conf.get(SPO_TABLE); - String outPath = conf.get(SPO_OUTPUTPATH); - - - assert inTable != null && outPath != null; - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - initTabToSeqFileJob(job, inTable, outPath); - job.setMapperClass(JoinSelectMapper.class); - job.setNumReduceTasks(0); - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - } - } - - private static class JoinSelectTester2 extends Configured implements Tool { - - - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - - String inTable = conf.get(PROSPECTS_TABLE); - System.out.println("Table is " + inTable); - String outPath = conf.get(PROSPECTS_OUTPUTPATH); - - - assert inTable != null && outPath != null; - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - initTabToSeqFileJob(job, inTable, outPath); - job.setMapperClass(CardinalityMapper.class); - job.setNumReduceTasks(0); - job.waitForCompletion(true); - - return job.isSuccessful() ? 
0 : 1; - } - } - - - private static class JoinSelectTester4 extends Configured implements Tool { - - - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConf(); - String outpath = conf.get(OUTPUTPATH); - - Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); - - MultipleInputs.addInputPath(job, new Path(PROSPECTSOUT.getAbsolutePath()), - SequenceFileInputFormat.class, JoinSelectAggregateMapper.class); - MultipleInputs.addInputPath(job,new Path(SPOOUT.getAbsolutePath()) , - SequenceFileInputFormat.class, JoinSelectAggregateMapper.class); - job.setMapOutputKeyClass(CompositeType.class); - job.setMapOutputValueClass(TripleCard.class); - - tempDir = new File(File.createTempFile(outpath, "txt").getParentFile(), System.currentTimeMillis() + ""); - SequenceFileOutputFormat.setOutputPath(job, new Path(tempDir.getAbsolutePath())); - job.setOutputFormatClass(SequenceFileOutputFormat.class); - job.setOutputKeyClass(TripleEntry.class); - job.setOutputValueClass(CardList.class); - - - job.setSortComparatorClass(JoinSelectSortComparator.class); - job.setGroupingComparatorClass(JoinSelectGroupComparator.class); - job.setPartitionerClass(JoinSelectPartitioner.class); - job.setReducerClass(JoinReducer.class); - job.setNumReduceTasks(32); - job.waitForCompletion(true); - - return job.isSuccessful() ? 0 : 1; - } - } - - - - private static class JoinSelectTester3 extends Configured implements Tool { - - - - @Override - public int run(String[] args) throws Exception { - - Configuration conf = getConfig(); - - String outTable = conf.get(SELECTIVITY_TABLE); - String inPath = conf.get(INPUTPATH); - - - assert outTable != null && inPath != null; - - Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis()); - job.setJarByClass(this.getClass()); - initSumMRJob(job, inPath, outTable); - - job.setMapperClass(CardinalityIdentityMapper.class); - job.setCombinerClass(CardinalityIdentityCombiner.class); - job.setReducerClass(CardinalityIdentityReducer.class); - job.setNumReduceTasks(32); - job.waitForCompletion(true); - - return job.isSuccessful() ? 
0 : 1; - - } - - - - } - - - - - - - - public class JoinSelectTestDriver extends Configured implements Tool { - - Configuration conf = getConfig(); - - @Override - public int run(String[] args) throws Exception { - - int res0 = ToolRunner.run(conf, new JoinSelectTester1(), args); - int res1 = 1; - int res2 = 1; - int res3 = 1; - - - - if(res0 == 0) { - res1 = ToolRunner.run(conf, new JoinSelectTester2(), args); - } - if(res1 == 0) { - res2 = ToolRunner.run(conf, new JoinSelectTester4(), args); - } - if(res2 == 0) { - res3 = ToolRunner.run(conf, new JoinSelectTester3(), args); - } - - return res3; - } - - } - - - - - private static Configuration getConfig() { - - Configuration conf = new Configuration(); - conf.set("fs.default.name", "file:///"); - conf.set("mapreduce.framework.name", "local"); - conf.set("spo.table", "rya_spo"); - conf.set("prospects.table", "rya_prospects"); - conf.set("selectivity.table", "rya_selectivity"); - conf.set("auths", ""); - conf.set("instance",INSTANCE_NAME); - conf.set("username","root"); - conf.set("password", ""); - conf.set("inputpath","temp"); - conf.set("outputpath","temp"); - conf.set("prospects.outputpath","prospects"); - conf.set("spo.outputpath", "spo"); - - - return conf; - - } - - - - - - - - - - - public static void initTabToSeqFileJob(Job job, String intable, String outpath) throws AccumuloSecurityException, IOException { - - Configuration conf = job.getConfiguration(); - - String username = conf.get(USERNAME); - System.out.println("Username is " + username); - String password = conf.get(PASSWORD); - String instance = conf.get(INSTANCE); - System.out.println("Instance is " + instance); - - - AccumuloInputFormat.setMockInstance(job, instance); - AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password)); - AccumuloInputFormat.setInputTableName(job, intable); - - job.setInputFormatClass(AccumuloInputFormat.class); - job.setMapOutputKeyClass(CompositeType.class); - job.setMapOutputValueClass(TripleCard.class); - - System.out.println("Outpath is " + outpath); - - // OUTPUT - if(outpath.equals("spo")) { - SPOOUT = new File(File.createTempFile(outpath, "txt").getParentFile(), System.currentTimeMillis() + "spo"); - SequenceFileOutputFormat.setOutputPath(job, new Path(SPOOUT.getAbsolutePath())); - } else { - PROSPECTSOUT = new File(File.createTempFile(outpath, "txt").getParentFile(), System.currentTimeMillis() + "prospects"); - SequenceFileOutputFormat.setOutputPath(job, new Path(PROSPECTSOUT.getAbsolutePath())); - } - job.setOutputFormatClass(SequenceFileOutputFormat.class); - job.setOutputKeyClass(CompositeType.class); - job.setOutputValueClass(TripleCard.class); - - } - - public static void initSumMRJob(Job job, String inputPath, String outtable) throws AccumuloSecurityException, IOException { - - Configuration conf = job.getConfiguration(); - - String username = conf.get(USERNAME); - String password = conf.get(PASSWORD); - String instance = conf.get(INSTANCE); - - - - AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password)); - AccumuloOutputFormat.setMockInstance(job, instance); - AccumuloOutputFormat.setDefaultTableName(job, outtable); - - - SequenceFileInputFormat.addInputPath(job, new Path(tempDir.getAbsolutePath())); - job.setInputFormatClass(SequenceFileInputFormat.class); - job.setMapOutputKeyClass(TripleEntry.class); - job.setMapOutputValueClass(CardList.class); - - - job.setOutputFormatClass(AccumuloOutputFormat.class); - job.setOutputKeyClass(Text.class); - 
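// The job emits (Text, Mutation) pairs to AccumuloOutputFormat, where the
// Text key names the destination table; a null or empty key should fall
// back to the default table configured above, which is how the reducers in
// these tests (which emit an empty Text) end up writing to rya_selectivity.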
job.setOutputValueClass(Mutation.class); - - - } - - - - - - @Before - public void init() throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException { - - MockInstance mockInstance = new MockInstance(INSTANCE_NAME); - c = mockInstance.getConnector("root", new PasswordToken("")); - - if (c.tableOperations().exists("rya_prospects")) { - c.tableOperations().delete("rya_prospects"); - } - if (c.tableOperations().exists("rya_selectivity")) { - c.tableOperations().delete("rya_selectivity"); - } - if (c.tableOperations().exists("rya_spo")) { - c.tableOperations().delete("rya_spo"); - } - - - c.tableOperations().create("rya_spo"); - c.tableOperations().create("rya_prospects"); - c.tableOperations().create("rya_selectivity"); - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(getConfig())); - } - - - - - - - - - - @Test - public void testMap1() throws Exception { - init(); - - System.out.println("*****************************Test1**************************** "); - - BatchWriter bw_table1 = c.createBatchWriter("rya_spo", new BatchWriterConfig()); - for (int i = 1; i < 3; i++) { - - RyaStatement rs = new RyaStatement(new RyaURI(uri + i), new RyaURI(uri + 5), new RyaType(uri + (i + 2))); - Map tripleRowMap = ryaContext.serializeTriple(rs); - TripleRow tripleRow = tripleRowMap.get(TABLE_LAYOUT.SPO); - Mutation m = JoinSelectStatsUtil.createMutation(tripleRow); - bw_table1.addMutation(m); - - - } - bw_table1.close(); - - BatchWriter bw_table2 = c.createBatchWriter("rya_prospects", new BatchWriterConfig()); - for (int i = 1; i < 6; i++) { - - int j = 1; - - for (String s : cardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + i)); - m.put(new Text(), new Text(), new Value(new IntWritable(i + j).toString().getBytes())); - bw_table2.addMutation(m); - j++; - } - - } - bw_table2.close(); - - - - - - Assert.assertEquals(0, ToolRunner.run(new JoinSelectTestDriver(), new String[]{""})); - Scanner scan = c.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Join type is " + entry.getKey().getColumnFamily().toString()); - System.out.println("Value is " + entry.getKey().getColumnQualifier().toString()); - } - - Scanner scan1 = c.createScanner("rya_selectivity" , new Authorizations()); - scan1.setRange(Range.prefix("predicate" +DELIM + uri + 5)); - int i = 5; - - for (Map.Entry entry : scan1) { - - int val1 = 5 + 2*i; - int val2 = 5 + 2*(i-1); - int val = Integer.parseInt(entry.getKey().getColumnQualifier().toString()); - - if(i < 3) { - Assert.assertTrue( val == val1); - } - if(i >= 3 && i < 6) { - Assert.assertTrue(val == val2); - } - i--; - } - Assert.assertTrue(i == -1); - - - - Scanner scan2 = c.createScanner("rya_selectivity" , new Authorizations()); - scan2.setRange(Range.prefix("object" +DELIM + uri + 3)); - int j = 5; - - for (Map.Entry entry : scan2) { - - int val1 = 5 + (j-2); - int val2 = 2+j; - int val = Integer.parseInt(entry.getKey().getColumnQualifier().toString()); - - if(j < 3) { - Assert.assertTrue( val == val2); - } - if(j >= 3 && j < 6) { - Assert.assertTrue(val == val1); - } - j--; - } - Assert.assertTrue(j == -1); - - - - - Scanner scan3 = c.createScanner("rya_selectivity", new Authorizations()); - scan3.setRange(Range.prefix("objectsubject" + DELIM + uri + 3 +DELIM +uri +1 )); - int k = 8; - - for (Map.Entry entry : scan3) { - - int val = 
Integer.parseInt(entry.getKey().getColumnQualifier().toString()); - - Assert.assertTrue(val == k); - k--; - } - Assert.assertTrue(k == 5); - - - - - - - - } - - - - - - @Test - public void testMap2() throws Exception { - - System.out.println("*********************Test2******************* "); - - init(); - - BatchWriter bw_table1 = c.createBatchWriter("rya_spo", new BatchWriterConfig()); - for (int i = 1; i < 4; i++) { - - RyaStatement rs = new RyaStatement(new RyaURI(uri + 1), new RyaURI(uri + 2), new RyaType(uri + i)); - Map tripleRowMap = ryaContext.serializeTriple(rs); - TripleRow tripleRow = tripleRowMap.get(TABLE_LAYOUT.SPO); - Mutation m = JoinSelectStatsUtil.createMutation(tripleRow); - bw_table1.addMutation(m); - - } - bw_table1.close(); - - BatchWriter bw_table2 = c.createBatchWriter("rya_prospects", new BatchWriterConfig()); - for (int i = 1; i < 4; i++) { - - for (String s : cardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + i)); - m.put(new Text(), new Text(), new Value(new IntWritable(i + 2).toString().getBytes())); - bw_table2.addMutation(m); - } - - } - bw_table2.close(); - - Assert.assertEquals(0, ToolRunner.run(new JoinSelectTestDriver(), new String[]{""})); - Scanner scan1 = c.createScanner("rya_selectivity" , new Authorizations()); - scan1.setRange(Range.prefix("subject" +DELIM + uri + 1)); - int i = 0; - - for (Map.Entry entry : scan1) { - - Assert.assertTrue(entry.getKey().getColumnQualifier().toString().equals("12")); - i++; - } - Assert.assertTrue(i == 6); - - Scanner scan2 = c.createScanner("rya_selectivity" , new Authorizations()); - scan2.setRange(Range.prefix("predicate" +DELIM + uri + 2)); - int j = 0; - - for (Map.Entry entry : scan2) { - - if(j < 3) { - Assert.assertTrue(entry.getKey().getColumnQualifier().toString().equals("12")); - } - if(j > 3 && j < 6) { - Assert.assertTrue(entry.getKey().getColumnQualifier().toString().equals("9")); - } - j++; - } - Assert.assertTrue(j == 6); - - Scanner scan3 = c.createScanner("rya_selectivity" , new Authorizations()); - scan3.setRange(Range.prefix("predicateobject" +DELIM + uri + 2 +DELIM + uri + 2)); - int k = 0; - - for (Map.Entry entry : scan3) { - Assert.assertTrue(entry.getKey().getColumnQualifier().toString().equals("3")); - k++; - } - Assert.assertTrue(k == 3); - - - } - - - - - @Test - public void testMap3() throws Exception { - init(); - - System.out.println("*************************Test3**************************** "); - - BatchWriter bw_table1 = c.createBatchWriter("rya_spo", new BatchWriterConfig()); - for (int i = 1; i < 3; i++) { - for (int j = 1; j < 3; j++) { - for (int k = 1; k < 3; k++) { - - RyaStatement rs = new RyaStatement(new RyaURI(uri + i), new RyaURI(uri + (j)), new RyaType(uri + k)); - Map tripleRowMap = ryaContext.serializeTriple(rs); - TripleRow tripleRow = tripleRowMap.get(TABLE_LAYOUT.SPO); - Mutation m = JoinSelectStatsUtil.createMutation(tripleRow); - bw_table1.addMutation(m); - - } - } - - } - bw_table1.close(); - - BatchWriter bw_table2 = c.createBatchWriter("rya_prospects", new BatchWriterConfig()); - for (int i = 1; i < 3; i++) { - - int k = 1; - for (String s : cardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + i)); - m.put(new Text(), new Text(), new Value(new IntWritable(i + k).toString().getBytes())); - bw_table2.addMutation(m); - k++; - } - - for (int j = 1; j < 3; j++) { - k = 1; - for (String s : aggCardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + uri + j + DELIM + i)); - m.put(new Text(), 
new Text(), new Value(new IntWritable(i + k +j).toString().getBytes())); - bw_table2.addMutation(m); - k++; - } - } - - } - bw_table2.close(); - - - - - - Assert.assertEquals(0, ToolRunner.run(new JoinSelectTestDriver(), new String[]{""})); - Scanner scan = c.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Join type is " + entry.getKey().getColumnFamily().toString()); - System.out.println("Value is " + entry.getKey().getColumnQualifier().toString()); - } - - - - Scanner scan1 = c.createScanner("rya_selectivity" , new Authorizations()); - scan1.setRange(Range.prefix("subject" +DELIM + uri + 1)); - int i = 0; - - for (Map.Entry entry : scan1) { - - Key key = entry.getKey(); - String s = key.getColumnFamily().toString(); - int val = Integer.parseInt(key.getColumnQualifier().toString()); - - if(s.equals("predicatepredicate")) { - Assert.assertTrue(val == 14); - } - if(s.equals("objectobject")) { - Assert.assertTrue(val == 18); - } - if(s.equals("predicateobjectpredicateobject")) { - Assert.assertTrue(val == 28); - } - if(s.equals("predicateobjectsubjectpredicate")) { - Assert.assertTrue(val == 20); - } - if(s.equals("predicateobjectobjectsubject")) { - Assert.assertTrue(val == 16); - } - - i++; - } - Assert.assertTrue(i == 12); - - - - - - - - } - - - - - - @Test - public void testMap4() throws Exception { - init(); - - System.out.println("*************************Test4**************************** "); - System.out.println("*************************Test4**************************** "); - - BatchWriter bw_table1 = c.createBatchWriter("rya_spo", new BatchWriterConfig()); - for (int i = 1; i < 3; i++) { - for (int j = 1; j < 3; j++) { - for (int k = 1; k < 3; k++) { - - if(j == 1 && k ==2) { - break; - } - - RyaStatement rs = new RyaStatement(new RyaURI(uri + i), new RyaURI(uri + (j)), new RyaType(uri + k)); - Map tripleRowMap = ryaContext.serializeTriple(rs); - TripleRow tripleRow = tripleRowMap.get(TABLE_LAYOUT.SPO); - Mutation m = JoinSelectStatsUtil.createMutation(tripleRow); - bw_table1.addMutation(m); - - } - } - - } - bw_table1.close(); - - BatchWriter bw_table2 = c.createBatchWriter("rya_prospects", new BatchWriterConfig()); - for (int i = 1; i < 3; i++) { - - int k = 1; - for (String s : cardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + i)); - m.put(new Text(), new Text(), new Value(new IntWritable(i + k).toString().getBytes())); - bw_table2.addMutation(m); - k++; - } - - for (int j = 1; j < 3; j++) { - k = 1; - for (String s : aggCardList) { - Mutation m = new Mutation(new Text(s + DELIM + uri + i + DELIM + uri + j + DELIM + i)); - m.put(new Text(), new Text(), new Value(new IntWritable(i + k + 2*j).toString().getBytes())); - bw_table2.addMutation(m); - k++; - } - } - - } - bw_table2.close(); - - - - - - Assert.assertEquals(0, ToolRunner.run(new JoinSelectTestDriver(), new String[]{""})); - Scanner scan = c.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Join type is " + entry.getKey().getColumnFamily().toString()); - System.out.println("Value is " + entry.getKey().getColumnQualifier().toString()); - } - - - - Scanner scan1 = c.createScanner("rya_selectivity" , new Authorizations()); - 
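// The checks below (and in the other tests of this class) all follow the
// same pattern: rya_selectivity rows are keyed by join position plus DELIM
// plus the constant value(s), the column family names the join type, and
// the column qualifier carries the summed cardinality, so scanning by row
// prefix and parsing the qualifiers verifies the pipeline output end to end.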
scan1.setRange(Range.prefix("subject" +DELIM + uri + 1)); - int i = 0; - - for (Map.Entry entry : scan1) { - - Key key = entry.getKey(); - String s = key.getColumnFamily().toString(); - int val = Integer.parseInt(key.getColumnQualifier().toString()); - - if(s.equals("predicatepredicate")) { - Assert.assertTrue(val == 11); - } - if(s.equals("objectobject")) { - Assert.assertTrue(val == 13); - } - if(s.equals("predicateobjectobjectpredicate")) { - Assert.assertTrue(val == 26); - } - if(s.equals("predicateobjectpredicateobject")) { - Assert.assertTrue(val == 25); - } - if(s.equals("predicateobjectsubjectpredicate")) { - Assert.assertTrue(val == 19); - } - if(s.equals("predicateobjectpredicatesubject")) { - Assert.assertTrue(val == 20); - } - - i++; - } - Assert.assertTrue(i == 12); - - - - Scanner scan2 = c.createScanner("rya_selectivity" , new Authorizations()); - scan2.setRange(Range.prefix("predicate" +DELIM + uri + 1)); - int j = 0; - - for (Map.Entry entry : scan2) { - - Key key = entry.getKey(); - String s = key.getColumnFamily().toString(); - int val = Integer.parseInt(key.getColumnQualifier().toString()); - - if(s.equals("subjectsubject")) { - Assert.assertTrue(val == 5); - } - if(s.equals("objectobject")) { - Assert.assertTrue(val == 8); - } - if(s.equals("objectsubjectsubjectpredicate")) { - Assert.assertTrue(val == 11); - } - if(s.equals("objectsubjectpredicateobject")) { - Assert.assertTrue(val == 15); - } - if(s.equals("objectsubjectobjectsubject")) { - Assert.assertTrue(val == 9); - } - if(s.equals("objectsubjectsubjectobject")) { - Assert.assertTrue(val == 10); - } - - j++; - } - Assert.assertTrue(j == 12); - - - - - - - - - } - - - - - - - - - - - - -} - - - - - - - diff --git a/extras/rya.prospector/src/test/resources/stats_cluster_config.xml b/extras/rya.prospector/src/test/resources/stats_cluster_config.xml deleted file mode 100644 index 5c96044cc..000000000 --- a/extras/rya.prospector/src/test/resources/stats_cluster_config.xml +++ /dev/null @@ -1,97 +0,0 @@ - - - - - - - - - - - mock - true - - - instance - accumulo - - - zookeepers - zoo1,zoo2,zoo3 - - - - - username - user - - - password - pass - - - - - spo.table - rya_spo - - - prospects.table - rya_prospects - - - selectivity.table - rya_selectivity - - - auths - U - - - prospector.auths - U - - - - prospector.intable - rya_spo - - - prospector.outtable - rya_prospects - - - - inputpath - /tmp/RyaStats/JoinSelectStatisticsSumInput - - - outputpath - /tmp/RyaStats/JoinSelectStatisticsSumInput - - - prospects.outputpath - /tmp/RyaStats/ProspectsOutput - - - spo.outputpath - /tmp/RyaStats/SpoOutput - - diff --git a/extras/tinkerpop.rya/pom.xml b/extras/tinkerpop.rya/pom.xml deleted file mode 100644 index 88d7a09db..000000000 --- a/extras/tinkerpop.rya/pom.xml +++ /dev/null @@ -1,104 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.extras - 3.2.10-SNAPSHOT - - - tinkerpop.rya - Apache Rya Tinkerpop - - - - org.apache.rya - rya.sail - - - org.apache.rya - accumulo.rya - - - - com.tinkerpop.gremlin - gremlin-groovy - - - - com.tinkerpop.rexster - rexster-server - - - com.tinkerpop.blueprints - blueprints-sail-graph - - - - junit - junit - test - - - - - - - org.codehaus.gmaven - gmaven-plugin - - - org.codehaus.groovy - groovy-all - 1.8.6 - - - org.codehaus.gmaven.runtime - gmaven-runtime-1.7 - 1.3 - - - org.codehaus.groovy - groovy-all - - - - - - - - 1.7 - - - generateStubs - compile - generateTestStubs - testCompile - - - - - - - - diff --git 
a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy deleted file mode 100644 index fc3419d40..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.blueprints.config - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.rexster.config.GraphConfiguration -import com.tinkerpop.rexster.config.GraphConfigurationContext; - -import mvm.rya.accumulo.AccumuloRdfConfiguration -import mvm.rya.accumulo.AccumuloRyaDAO -import mvm.rya.blueprints.sail.RyaSailGraph -import mvm.rya.rdftriplestore.RdfCloudTripleStore -import mvm.rya.rdftriplestore.inference.InferenceEngine -import org.apache.commons.configuration.Configuration -import static mvm.rya.accumulo.mr.utils.MRUtils.* -import org.apache.commons.configuration.MapConfiguration -import mvm.rya.blueprints.sail.RyaSailEdge -import mvm.rya.blueprints.sail.RyaSailVertex -import org.apache.accumulo.core.client.mock.MockInstance -import org.apache.accumulo.core.client.ZooKeeperInstance - -/** - * Date: 5/8/12 - * Time: 5:38 PM - */ -class RyaGraphConfiguration implements GraphConfiguration { - - def instance, zk, user, pwd, tablePrefix, auths, cv, ttl, mock - - public static final RyaSailGraph createGraph(Map props) { - if (props == null) props = [:] - def graphConfiguration = new RyaGraphConfiguration() - RyaGraphConfiguration.load() - return graphConfiguration.configureGraphInstance(new GraphConfigurationContext(new MapConfiguration(props), new HashMap())) - } - - public static void load() { - RyaSailEdge.metaClass.getSubj = { (delegate as RyaSailEdge).getVertex(Direction.OUT).id } - RyaSailEdge.metaClass.getPred = { (delegate as RyaSailEdge).label } - RyaSailEdge.metaClass.getObj = { (delegate as RyaSailEdge).getVertex(Direction.IN).id } - RyaSailEdge.metaClass.getCntxt = { (delegate as RyaSailEdge).namedGraph } - RyaSailEdge.metaClass.getStmt = { (delegate as RyaSailEdge).rawEdge } - } - - @Override - public RyaSailGraph configureGraphInstance(GraphConfigurationContext context) { - Configuration graphConfiguration = context.getProperties() - instance = graphConfiguration.getString(AC_INSTANCE_PROP) - zk = graphConfiguration.getString(AC_ZK_PROP) - user = graphConfiguration.getString(AC_USERNAME_PROP) - pwd = graphConfiguration.getString(AC_PWD_PROP) - mock = (graphConfiguration.containsKey(AC_MOCK_PROP)) ? 
(graphConfiguration.getBoolean(AC_MOCK_PROP)) : (null) - assert instance != null && (zk != null || mock != null) && user != null && pwd != null - - def ryaConfiguration = new AccumuloRdfConfiguration(); - //set other properties - graphConfiguration.keys.each { key -> - def val = graphConfiguration.getString(key) - if (val != null) { - ryaConfiguration.set(key, val) - } - } - //set table prefix - ryaConfiguration.setTablePrefix(ryaConfiguration.getTablePrefix()) - - def store = new RdfCloudTripleStore(); - store.setConf(ryaConfiguration); - def cryadao = new AccumuloRyaDAO(); - def connector = null - if (mock) { - connector = new MockInstance(instance).getConnector(user, pwd); - } else { - connector = new ZooKeeperInstance(instance, zk).getConnector(user, pwd); - } - cryadao.setConnector(connector); - store.setRyaDAO(cryadao); -// def ceval = new (); -// ceval.setConnector(connector); -// store.setRdfEvalStatsDAO(ceval); - def inferenceEngine = new InferenceEngine(); - inferenceEngine.setRyaDAO(cryadao); - store.setInferenceEngine(inferenceEngine); - - return new RyaSailGraph(store) - } -} diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy deleted file mode 100644 index 73c4fc17d..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.rya.blueprints.sail -// -//import com.tinkerpop.blueprints.pgm.impls.sail.SailEdge -//import org.openrdf.model.Statement -//import org.openrdf.model.impl.ContextStatementImpl -//import org.openrdf.model.impl.StatementImpl -// -///** -// * Blueprints Edge for Sail stores -// * outVertex edge inVertex -// * -// * Date: 5/9/12 -// * Time: 9:03 AM -// */ -//class RyaSailEdge extends SailEdge { -// -// public static final String SPLIT = "|" -// -// RyaSailEdge(Statement rawEdge, RyaSailGraph graph) { -// super(rawEdge, graph) -// } -// -// @Override -// Object getId() { -// def statement = this.getRawEdge() -// return formatId(statement); -// } -// -// /** -// * Returns a formatted id for a full statement. 
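// * For example, a statement (subj, pred, obj) is formatted as
// * "subj|pred|obj", with "|context" appended when a context is present
// * (fields joined by SPLIT).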
-// * @param statement -// * @return -// */ -// static String formatId(Statement statement) { -// if (null != statement.getContext()) -// return (new StringBuilder()).append(statement.getSubject()).append(SPLIT).append(statement.getPredicate()).append(SPLIT).append(statement.getObject()).append(SPLIT).append(statement.getContext()).toString(); -// else -// return (new StringBuilder()).append(statement.getSubject()).append(SPLIT).append(statement.getPredicate()).append(SPLIT).append(statement.getObject()).toString() -// } -// -//// public static RyaSailEdge fromId(String id, RyaSailGraph graph) { -//// def decodedId = URLDecoder.decode(id) -//// def statement = RdfIO.readStatement(ByteStreams.newDataInput(decodedId.bytes), RdfCloudTripleStoreConstants.VALUE_FACTORY) -//// println statement -//// return new RyaSailEdge(statement, graph) -//// } -// -// /** -// * -// * @param id formatted from getId method -// * @param graph -// * @return -// */ -// public static RyaSailEdge fromId(String id, RyaSailGraph graph) { -// assert id != null -// def split = id.split("\\|") -// if(split.length < 3) { -// return null -// } -// String subj_s = split[0].trim() -// def subj = graph.createValue(subj_s) -// String pred_s = split[1].trim() -// def pred = graph.createValue(pred_s) -// String obj_s = split[2].trim() -// def obj = graph.createValue(obj_s) -// if(split.length == 4) { -// //context available -// def context = graph.createValue(split[3]) -// return new RyaSailEdge(new ContextStatementImpl(subj, pred, obj, context), graph); -// } else { -// return new RyaSailEdge(new StatementImpl(subj, pred, obj), graph); -// } -// } -// -//} diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy deleted file mode 100644 index 8d04c75a6..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
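The RyaGraphConfiguration removed above is the module's entry point: createGraph wraps a property map in a MapConfiguration, and configureGraphInstance connects through a MockInstance when the mock flag is set, or a ZooKeeperInstance otherwise. A minimal sketch of driving it from Java, assuming the MRUtils keys are plain String constants; the instance name and credentials are illustrative:

    import java.util.HashMap;
    import java.util.Map;

    import mvm.rya.blueprints.config.RyaGraphConfiguration;
    import mvm.rya.blueprints.sail.RyaSailGraph;

    import static mvm.rya.accumulo.mr.utils.MRUtils.*;

    public class GraphBootstrapSketch {
        public static void main(String[] args) {
            Map<String, String> props = new HashMap<String, String>();
            props.put(AC_INSTANCE_PROP, "inst"); // Accumulo instance name
            props.put(AC_MOCK_PROP, "true");     // MockInstance; no ZooKeeper quorum needed
            props.put(AC_USERNAME_PROP, "user");
            props.put(AC_PWD_PROP, "pwd");
            RyaSailGraph graph = RyaGraphConfiguration.createGraph(props);
            // ... traverse the graph ...
            graph.shutdown();                    // releases the underlying Sail
        }
    }

createGraph also calls load(), which bolts subj/pred/obj/cntxt/stmt accessors onto RyaSailEdge through the Groovy metaclass, so Groovy callers can read those properties directly off each edge.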
- */ - -package mvm.rya.blueprints.sail - -import com.tinkerpop.blueprints.Edge -import info.aduna.iteration.Iteration -import info.aduna.iteration.Iterations -import info.aduna.iteration.IteratorIteration -import org.openrdf.model.Statement -import org.openrdf.sail.SailException - -/** - * Edge iterable that returns RyaSailEdge - * Date: 5/9/12 - * Time: 9:26 AM - */ -class RyaSailEdgeSequence implements Iterable, Iterator -{ - - protected Iteration statements; - protected RyaSailGraph graph; - - public RyaSailEdgeSequence(Iteration statements, RyaSailGraph graph) - { - this.statements = statements; - this.graph = graph; - } - - public RyaSailEdgeSequence(Iterator iterator, RyaSailGraph graph) { - this(new IteratorIteration(iterator), graph) - } - - public RyaSailEdgeSequence() - { - statements = null; - graph = null; - } - - public Iterator iterator() - { - return this; - } - - public void remove() - { - throw new UnsupportedOperationException(); - } - - public boolean hasNext() - { - if(null == statements) - return false; - try - { - if(statements.hasNext()) - return true; - } - catch(SailException e) - { - throw new RuntimeException(e.getMessage(), e); - } - Iterations.closeCloseable(statements); - return false; - } - - public Edge next() - { - if(null == statements) - throw new NoSuchElementException(); - try - { - def statement = (Statement) statements.next() - return new RyaSailEdge(statement, graph); - } - catch(SailException e) - { - throw new RuntimeException(e.getMessage()); - } - catch(NoSuchElementException e) - { - try - { - Iterations.closeCloseable(statements); - } - catch(SailException e2) - { - throw new RuntimeException(e2.getMessage(), e2); - } - throw e; - } - } - -} diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy deleted file mode 100644 index 7c78e319f..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
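RyaSailEdgeSequence above is the adapter this module leans on everywhere: it exposes a Sesame Iteration as a plain java.util.Iterator of edges and closes the iteration the moment it runs dry, so no underlying scanner is left open. A generic sketch of that wrap-and-close pattern; ClosingIterator is an illustrative name, not a Rya class:

    import info.aduna.iteration.CloseableIteration;

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    class ClosingIterator<T> implements Iterator<T> {
        private final CloseableIteration<T, ? extends Exception> delegate;

        ClosingIterator(CloseableIteration<T, ? extends Exception> delegate) {
            this.delegate = delegate;
        }

        public boolean hasNext() {
            try {
                if (delegate.hasNext()) {
                    return true;
                }
                delegate.close(); // exhausted: free the underlying resources now
                return false;
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            try {
                return delegate.next();
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

The original goes one step further and also closes the iteration when next() throws NoSuchElementException, covering callers that skip hasNext().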
- */ - -package mvm.rya.blueprints.sail - -import com.tinkerpop.blueprints.impls.sail.SailGraph -import com.tinkerpop.blueprints.impls.sail.SailHelper -import com.tinkerpop.blueprints.impls.sail.SailTokens -import org.openrdf.model.Literal -import org.openrdf.model.Resource -import org.openrdf.model.URI -import org.openrdf.model.Value -import org.openrdf.model.impl.BNodeImpl -import org.openrdf.model.impl.URIImpl -import org.openrdf.sail.Sail -import org.openrdf.sail.SailConnection -import com.tinkerpop.blueprints.Edge -import com.tinkerpop.blueprints.Vertex -import com.tinkerpop.blueprints.util.MultiIterable -import org.openrdf.sail.SailException - -/** - * Blueprints Graph to interact with Sail stores - * - * Date: 5/8/12 - * Time: 5:52 PM - */ -class RyaSailGraph extends SailGraph { - - public static final Resource[] EMPTY_CONTEXT = new Resource[0] - - RyaSailGraph(Sail sail) { - super(sail) - } - - /** - * For some reason, the SailGraph does not implement this method. - * The id is the full formatted id of the edge (rdf statement) - * - * @param id - * @return - */ - @Override - Edge getEdge(Object id) { - assert id != null - return RyaSailEdge.fromId(id, this) - } - - @Override - Iterable getEdges() { - return getEdgesSequence(); - } - - protected RyaSailEdgeSequence getEdgesSequence() { - return new RyaSailEdgeSequence(((SailConnection) sailConnection.get()).getStatements(null, null, null, false, new Resource[0]), this) - } - - @Override - Iterable getVertices() { - return new RyaSailVertexSequence(this.getEdgesSequence()) - } - - /** - * Utility method that can take a string and make it a Resource, Uri, or Literal - * @param resource - * @return - */ - public Value createValue(String resource) { - if (SailHelper.isBNode(resource)) - new BNodeImpl(resource.substring(2)); - Literal literal; - if ((literal = SailHelper.makeLiteral(resource, this)) != null) - return literal - if (resource.contains(":") || resource.contains("/") || resource.contains("#")) { - resource = expandPrefix(resource); - new URIImpl(resource); - } else { - throw new RuntimeException((new StringBuilder()).append(resource).append(" is not a valid URI, blank node, or literal value").toString()); - } - } - - public Vertex createVertex(String resource) { - return new RyaSailVertex(createValue(resource), this); - } - - @Override - public Vertex addVertex(Object id) { - if (null == id) - id = SailTokens.URN_UUID_PREFIX + UUID.randomUUID().toString(); - return createVertex(id.toString()); - } - - @Override - public Vertex getVertex(final Object id) { - if (null == id) - throw new IllegalArgumentException("Element identifier cannot be null"); - - try { - return createVertex(id.toString()); - } catch (RuntimeException re) { - return null; - } - } - - public Iterable query(final String subj, final String pred, final String obj, final String cntxt) { - return new RyaSailEdgeSequence(sailConnection.get().getStatements( - (subj != null) ? (Resource) createValue(subj) : null, - (pred != null) ? (URI) createValue(pred) : null, - (obj != null) ? createValue(obj) : null, - false, - (cntxt != null) ? 
(Resource) createValue(cntxt) : EMPTY_CONTEXT), - this); - } - -} diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy deleted file mode 100644 index 96ef08e4d..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.rya.blueprints.sail -// -//import com.tinkerpop.blueprints.pgm.impls.MultiIterable -//import com.tinkerpop.blueprints.pgm.impls.sail.SailVertex -//import org.openrdf.model.Resource -//import org.openrdf.model.Value -//import org.openrdf.model.impl.URIImpl -//import org.openrdf.sail.SailException -//import com.tinkerpop.blueprints.pgm.Edge -// -///** -// * Extension to SailVertex to use RyaSailEdgeSequence underneath -// * Date: 5/9/12 -// * Time: 3:40 PM -// */ -//class RyaSailVertex extends SailVertex { -// -// def sailGraph -// -// RyaSailVertex(Value rawVertex, RyaSailGraph graph) { -// super(rawVertex, graph) -// sailGraph = graph -// } -// -// @Override -// public Iterable getOutEdges(final String... labels) { -// def vertex = getRawVertex() -// if (vertex instanceof Resource) { -// try { -// if (labels.length == 0) { -// return new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements((Resource) vertex, null, null, false), sailGraph); -// } else if (labels.length == 1) { -// return new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements((Resource) vertex, new URIImpl(sailGraph.expandPrefix(labels[0])), null, false), sailGraph); -// } else { -// final List> edges = new ArrayList>(); -// for (final String label: labels) { -// edges.add(new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements((Resource) vertex, new URIImpl(sailGraph.expandPrefix(label)), null, false), sailGraph)); -// } -// return new MultiIterable(edges); -// } -// } catch (SailException e) { -// throw new RuntimeException(e.getMessage(), e); -// } -// } else { -// return new RyaSailEdgeSequence(); -// } -// -// } -// -// @Override -// public Iterable getInEdges(final String... 
labels) { -// try { -// def vertex = getRawVertex() -// if (labels.length == 0) { -// return new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements(null, null, vertex, false), sailGraph); -// } else if (labels.length == 1) { -// return new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements(null, new URIImpl(sailGraph.expandPrefix(labels[0])), vertex, false), sailGraph); -// } else { -// final List> edges = new ArrayList>(); -// for (final String label: labels) { -// edges.add(new RyaSailEdgeSequence(sailGraph.getSailConnection().get().getStatements(null, new URIImpl(sailGraph.expandPrefix(label)), vertex, false), sailGraph)); -// } -// return new MultiIterable(edges); -// } -// } catch (SailException e) { -// throw new RuntimeException(e.getMessage(), e); -// } -// -// } -//} diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy deleted file mode 100644 index 451955d84..000000000 --- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
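The query(subj, pred, obj, cntxt) helper on RyaSailGraph above takes string forms of a triple pattern, treats each null position as a wildcard, and returns a RyaSailEdgeSequence over the matching statements. A small usage sketch; the URI is illustrative:

    import com.tinkerpop.blueprints.Direction;
    import com.tinkerpop.blueprints.Edge;

    import mvm.rya.blueprints.sail.RyaSailGraph;

    public class QuerySketch {
        // Print every statement with the given subject; predicate, object
        // and context stay null, i.e. unconstrained.
        static void dump(RyaSailGraph graph) {
            for (Object o : graph.query("urn:test#a", null, null, null)) {
                Edge e = (Edge) o;
                System.out.println(e.getVertex(Direction.OUT).getId()
                        + " --" + e.getLabel() + "--> "
                        + e.getVertex(Direction.IN).getId());
            }
        }
    }

Note that an absent context is mapped to EMPTY_CONTEXT, a zero-length Resource array, which asks the underlying getStatements call for statements from every context; a one-element array would pin a single named graph instead.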
- */ - -package mvm.rya.blueprints.sail - -import com.google.common.collect.Iterators -import com.google.common.collect.PeekingIterator -import com.tinkerpop.blueprints.Edge -import com.tinkerpop.blueprints.Vertex -import org.openrdf.model.Statement - -/** - * Iterable that provides a distinct list of subjects or objects from statements - * Date: 5/8/12 - * Time: 5:56 PM - */ -class RyaSailVertexSequence implements Iterable, Iterator { - enum VERTEXSIDE { - SUBJECT, OBJECT - } - def PeekingIterator iter - def RyaSailGraph graph - def previous - def vertexSide = VERTEXSIDE.SUBJECT - - RyaSailVertexSequence() { - } - - RyaSailVertexSequence(RyaSailEdgeSequence iter) { - this(iter, VERTEXSIDE.SUBJECT) - } - - RyaSailVertexSequence(RyaSailEdgeSequence iter, VERTEXSIDE vertexSide) { - this.iter = Iterators.peekingIterator(iter) - this.graph = iter.graph - this.vertexSide = vertexSide - } - - @Override - Iterator iterator() { - return this - } - - @Override - boolean hasNext() { - if (iter == null) { - return false - } - while (iter.hasNext()) { - def peek = (RyaSailEdge) iter.peek() - def subject = getVertexSide(peek.getRawEdge()) - if (!(subject.equals(previous))) { - return true - } - iter.next() //keep iterating - } - return false; - } - - @Override - Vertex next() { - if (!this.hasNext()) - throw new NoSuchElementException(); - def next = (RyaSailEdge) iter.next() - Statement statement = next.getRawEdge() - previous = getVertexSide(statement) - return new RyaSailVertex(previous, graph); - } - - def getVertexSide(Statement statement) { - return (VERTEXSIDE.SUBJECT.equals(vertexSide)) ? statement.getSubject() : statement.getObject() - } - - @Override - void remove() { - throw new UnsupportedOperationException(); - } -} diff --git a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java deleted file mode 100644 index 22eff06a2..000000000 --- a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java +++ /dev/null @@ -1,101 +0,0 @@ -package mvm.rya.blueprints.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.tinkerpop.blueprints.impls.sail.SailEdge; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; - -/** - * Blueprints Edge for Sail stores - * outVertex edge inVertex - *
- * Groovy's metaclass handling interferes with property access here, hence this plain-Java class. - *

- * Date: 5/9/12 - * Time: 9:03 AM - */ -public class RyaSailEdge extends SailEdge { - - public static final String SPLIT = "|"; - - public RyaSailEdge(Statement rawEdge, RyaSailGraph graph) { - super(rawEdge, graph); - } - - @Override - public Object getId() { - Statement statement = this.getRawEdge(); - return formatId(statement); - } - - /** - * Returns a formatted id for a full statement. - * - * @param statement - * @return - */ - public static String formatId(Statement statement) { - if (null != statement.getContext()) - return (new StringBuilder()).append(statement.getSubject()).append(SPLIT).append(statement.getPredicate()).append(SPLIT).append(statement.getObject()).append(SPLIT).append(statement.getContext()).toString(); - else - return (new StringBuilder()).append(statement.getSubject()).append(SPLIT).append(statement.getPredicate()).append(SPLIT).append(statement.getObject()).toString(); - } - -// public static RyaSailEdge fromId(String id, RyaSailGraph graph) { -// def decodedId = URLDecoder.decode(id) -// def statement = RdfIO.readStatement(ByteStreams.newDataInput(decodedId.bytes), RdfCloudTripleStoreConstants.VALUE_FACTORY) -// println statement -// return new RyaSailEdge(statement, graph) -// } - - /** - * @param id formatted from getId method - * @param graph - * @return - */ - public static RyaSailEdge fromId(String id, RyaSailGraph graph) { - assert id != null; - String[] split = id.split("\\|"); - if (split.length < 3) { - return null; - } - String subj_s = split[0].trim(); - Value subj = graph.createValue(subj_s); - String pred_s = split[1].trim(); - Value pred = graph.createValue(pred_s); - String obj_s = split[2].trim(); - Value obj = graph.createValue(obj_s); - if (split.length == 4) { - //context available - Value context = graph.createValue(split[3]); - return new RyaSailEdge(new ContextStatementImpl((Resource) subj, (URI) pred, obj, (Resource) context), graph); - } else { - return new RyaSailEdge(new StatementImpl((Resource) subj, (URI) pred, obj), graph); - } - } - -} diff --git a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java deleted file mode 100644 index 2bb8f3f9a..000000000 --- a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java +++ /dev/null @@ -1,105 +0,0 @@ -package mvm.rya.blueprints.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
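The Java RyaSailEdge above fixes the edge id scheme: formatId joins subject, predicate, object, and optionally context with a '|' separator, and fromId splits on the same character and rebuilds a Statement through the graph's createValue. A round-trip sketch with illustrative URIs:

    import org.openrdf.model.Statement;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.model.impl.StatementImpl;
    import org.openrdf.model.impl.ValueFactoryImpl;

    import mvm.rya.blueprints.sail.RyaSailEdge;
    import mvm.rya.blueprints.sail.RyaSailGraph;

    public class EdgeIdSketch {
        static void roundTrip(RyaSailGraph graph) {
            ValueFactory vf = new ValueFactoryImpl();
            Statement stmt = new StatementImpl(
                    vf.createURI("urn:test#a"),
                    vf.createURI("urn:test#p"),
                    vf.createURI("urn:test#b"));
            // roughly "urn:test#a|urn:test#p|urn:test#b"
            String id = RyaSailEdge.formatId(stmt);
            // fromId returns null when the id splits into fewer than three parts
            RyaSailEdge edge = RyaSailEdge.fromId(id, graph);
            System.out.println(id + " -> " + edge);
        }
    }

One caveat the deleted code leaves open: the separator is never escaped, so a value whose lexical form contains '|' would not survive the round trip.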
- */ - - - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.util.MultiIterable; -import com.tinkerpop.blueprints.impls.sail.SailGraph; -import com.tinkerpop.blueprints.impls.sail.SailVertex; -import org.openrdf.model.Resource; -import org.openrdf.model.Value; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.sail.SailException; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * For some reason, the groovy class overwrites the metaclass and Gremlin.load will not change it for properties like outE, both, etc - * Date: 5/10/12 - * Time: 12:35 PM - */ -public class RyaSailVertex extends SailVertex { - - public RyaSailVertex(Value rawVertex, SailGraph graph) { - super(rawVertex, graph); - } - - - @Override - public Iterable getEdges(Direction direction, final String... labels) { - if (direction.equals(Direction.OUT)) - return getOutEdges(labels); - if (direction.equals(Direction.IN)) { - return getInEdges(labels); - } - return new MultiIterable(Arrays.asList(new Iterable[] { getInEdges(labels), getOutEdges(labels) })); - } - - private Iterable getOutEdges(final String... labels) { - Value vertex = getRawVertex(); - if (vertex instanceof Resource) { - try { - if (labels.length == 0) { - return new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements((Resource) vertex, null, null, false), getRyaSailGraph()); - } else if (labels.length == 1) { - return new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements((Resource) vertex, new URIImpl(getRyaSailGraph().expandPrefix(labels[0])), null, false), getRyaSailGraph()); - } else { - final List> edges = new ArrayList>(); - for (final String label: labels) { - edges.add(new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements((Resource) vertex, new URIImpl(getRyaSailGraph().expandPrefix(label)), null, false), getRyaSailGraph())); - } - return new MultiIterable(edges); - } - } catch (SailException e) { - throw new RuntimeException(e.getMessage(), e); - } - } else { - return new RyaSailEdgeSequence(); - } - } - - private Iterable getInEdges(final String... 
labels) { - try { - Value vertex = getRawVertex(); - if (labels.length == 0) { - return new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements(null, null, vertex, false), getRyaSailGraph()); - } else if (labels.length == 1) { - return new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements(null, new URIImpl(getRyaSailGraph().expandPrefix(labels[0])), vertex, false), getRyaSailGraph()); - } else { - final List> edges = new ArrayList>(); - for (final String label: labels) { - edges.add(new RyaSailEdgeSequence(getRyaSailGraph().getSailConnection().get().getStatements(null, new URIImpl(getRyaSailGraph().expandPrefix(label)), vertex, false),getRyaSailGraph())); - } - return new MultiIterable(edges); - } - } catch (SailException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - public RyaSailGraph getRyaSailGraph() { - return (RyaSailGraph) graph; - } -} diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy deleted file mode 100644 index fe0f4e0a0..000000000 --- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
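RyaSailVertex above translates the Blueprints Direction argument into statement scans: OUT anchors the raw vertex as subject, IN anchors it as object, and BOTH concatenates the two through a MultiIterable; multiple labels fan out into one predicate-constrained scan each. Through the Vertex interface the call site is just this (URI and label illustrative):

    import com.tinkerpop.blueprints.Direction;
    import com.tinkerpop.blueprints.Edge;
    import com.tinkerpop.blueprints.Vertex;

    import mvm.rya.blueprints.sail.RyaSailGraph;

    public class VertexEdgesSketch {
        // List outgoing edges constrained to one predicate label.
        static void listOutEdges(RyaSailGraph graph) {
            Vertex v = graph.getVertex("urn:test#a");
            for (Edge e : v.getEdges(Direction.OUT, "urn:test#p")) {
                System.out.println(e.getId());
            }
        }
    }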
- */ - -//package mvm.rya.blueprints -// -//import com.tinkerpop.blueprints.pgm.impls.sail.SailGraph -//import com.tinkerpop.blueprints.pgm.impls.sail.SailVertex -//import com.tinkerpop.gremlin.groovy.Gremlin -//import mvm.rya.accumulo.AccumuloRdfConfiguration -//import mvm.rya.accumulo.AccumuloRdfDAO -//import mvm.rya.accumulo.AccumuloRdfEvalStatsDAO -// -//import mvm.rya.blueprints.config.RyaGraphConfiguration -//import mvm.rya.rdftriplestore.RdfCloudTripleStore -//import mvm.rya.rdftriplestore.inference.InferenceEngine -//import org.apache.accumulo.core.client.ZooKeeperInstance -//import static mvm.rya.accumulo.mr.utils.MRUtils.* -//import static mvm.rya.api.RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG -//import static mvm.rya.api.RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX -// -///** -// * Date: 5/7/12 -// * Time: 5:39 PM -// */ -//class TstGremlinRya { -// public static void main(String[] args) { -// -// def conf = new AccumuloRdfConfiguration(); - -// conf.setDisplayQueryPlan(true); -// def store = new RdfCloudTripleStore(); -// store.setConf(conf); -// def crdfdao = new AccumuloRdfDAO(); -// def connector = new ZooKeeperInstance("acu13", "stratus25:2181").getConnector("root", "secret"); -// crdfdao.setConnector(connector); -// conf.setTablePrefix("l_"); -// crdfdao.setConf(conf); -// store.setRdfDao(crdfdao); -// def ceval = new AccumuloRdfEvalStatsDAO(); -// ceval.setConnector(connector); -// ceval.setConf(conf); -// store.setRdfEvalStatsDAO(ceval); -// def inferenceEngine = new InferenceEngine(); -// inferenceEngine.setRdfDao(crdfdao); -// inferenceEngine.setConf(conf); -// store.setInferenceEngine(inferenceEngine); -// store.setConf(conf); -// -// Gremlin.load() -// def g = new SailGraph(store) -//// def g = RyaGraphConfiguration.createGraph([(AC_INSTANCE_PROP): "acu13", (AC_ZK_PROP): "stratus25:2181",(AC_USERNAME_PROP): "root", (AC_PWD_PROP): "secret", (CONF_TBL_PREFIX): "l_", (CONF_QUERYPLAN_FLAG): "true"]); -// -// def v = g.getVertex('http://www.Department0.University0.edu/GraduateCourse0'); -// def v2 = g.getVertex('http://www.Department0.University0.edu/GraduateCourse1'); -//// v.getInEdges().each { -//// println it -//// } -//// v.getInEdges('urn:lubm:rdfts#takesCourse').each { -//// println it -//// } -//// def gc0 = g.getVertex('http://www.Department0.University0.edu/GraduateCourse0') -//// gc0.getOutEdges().each { -//// println it.getInVertex() -//// } -//// -//// def gc0_lit = g.getVertex('\"GraduateCourse0\"') -//// println gc0_lit -// -// v = g.getVertex('http://dbpedia.org/resource/Albert_Camus') -// println v.outE.each { -// println it.label -// } -// -// g.shutdown() -// -// g = RyaGraphConfiguration.createGraph([(AC_INSTANCE_PROP): "acu13", (AC_ZK_PROP): "stratus25:2181",(AC_USERNAME_PROP): "root", (AC_PWD_PROP): "secret", (CONF_TBL_PREFIX): "l_", (CONF_QUERYPLAN_FLAG): "true"]); -// -// def rv = g.getVertex('http://dbpedia.org/resource/Albert_Camus') -// println rv.outE.each { -// println it.label -// } -// -// v = new SailVertex(rv.getRawVertex(), rv.sailGraph) -// println v.outE -// -// g.shutdown() -//// -//// def name = { -//// println it.name -//// } -//// println SailVertex.metaClass.properties.each(name) -//// println RyaSailVertex.metaClass.properties.each(name) -//// println RyaSailVertex.metaClass.properties.each(name) -// } -//} diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy 
deleted file mode 100644 index 9dd0627ce..000000000 --- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package mvm.rya.blueprints.config - -import com.tinkerpop.blueprints.Vertex -import com.tinkerpop.blueprints.Direction -import junit.framework.TestCase -import mvm.rya.api.RdfCloudTripleStoreConstants -import mvm.rya.api.resolver.RdfToRyaConversions -import mvm.rya.blueprints.sail.RyaSailEdge -import org.openrdf.model.ValueFactory -import org.openrdf.model.impl.StatementImpl -import org.openrdf.model.impl.ValueFactoryImpl - -import static mvm.rya.api.RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX -import static mvm.rya.accumulo.mr.utils.MRUtils.* -import org.apache.accumulo.core.security.Authorizations -import org.apache.accumulo.core.client.Connector -import mvm.rya.accumulo.AccumuloRyaDAO -import mvm.rya.accumulo.AccumuloRdfConfiguration -import org.apache.accumulo.core.client.mock.MockInstance -import org.apache.accumulo.core.client.admin.SecurityOperations -import org.apache.accumulo.core.Constants -import org.apache.accumulo.core.security.TablePermission - -/** - * Date: 5/9/12 - * Time: 3:11 PM - */ -class RyaGraphConfigurationTest extends TestCase { - private String user = "user"; - - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - private AccumuloRyaDAO ryaDAO; - private ValueFactory vf = new ValueFactoryImpl(); - private String namespace = "urn:test#"; - private AccumuloRdfConfiguration conf; - - @Override - public void setUp() throws Exception { - super.setUp(); - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + 
RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - - conf = new AccumuloRdfConfiguration(); - ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - conf.setTablePrefix(tablePrefix); - ryaDAO.setConf(conf); - ryaDAO.init(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - } - - public void testGraphConfiguration() { - def a = vf.createURI(namespace, "a") - def statement = new StatementImpl(a, vf.createURI(namespace, "p"), vf.createLiteral("l")) - def statement2 = new StatementImpl(a, vf.createURI(namespace, "p2"), vf.createLiteral("l")) - ryaDAO.add(RdfToRyaConversions.convertStatement(statement)); - ryaDAO.add(RdfToRyaConversions.convertStatement(statement2)); - ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l")))); - ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l")))); - - RyaGraphConfiguration.load() - - def graph = RyaGraphConfiguration.createGraph( - [(AC_INSTANCE_PROP): instance, - (AC_MOCK_PROP): "true", - (AC_USERNAME_PROP): user, - (AC_PWD_PROP): pwd, - (CONF_TBL_PREFIX): tablePrefix, -// (CONF_QUERYPLAN_FLAG): "true", - ] - ); - - def edge = graph.getEdge(RyaSailEdge.formatId(statement)) - assertNotNull(edge) - Vertex vertex = graph.getVertex(a.stringValue()) - assertNotNull(vertex) - def edges = vertex.getEdges(Direction.OUT).iterator().toList() - assertEquals(2, edges.size()) - assertNotNull edges[0].subj - assertNotNull edges[0].pred - assertNotNull edges[0].obj - assertNull edges[0].cntxt - - def queryEdges = graph.query(edges[0].subj, edges[0].pred, edges[0].obj, edges[0].cntxt) - assertEquals edges[0], queryEdges[0] - - graph.shutdown() - } -} diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy deleted file mode 100644 index c66135071..000000000 --- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
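The vertex-sequence test below is worth reading with one detail in mind: RyaSailVertexSequence compares each statement's subject (or object) only against the immediately preceding one, so it removes adjacent repeats, not global duplicates. That appears to rely on Rya's SPO/OSP scans handing statements back in sorted order, which groups equal subjects or objects together. An illustrative reduction of the peek-and-skip loop, not Rya code:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class AdjacentDistinctSketch {
        // Collapses runs of equal values; only correct on grouped/sorted input.
        static List<String> distinctAdjacent(List<String> sorted) {
            List<String> out = new ArrayList<String>();
            String previous = null;
            for (String s : sorted) {
                if (!s.equals(previous)) {
                    out.add(s); // a new group starts here
                }
                previous = s;
            }
            return out;
        }

        public static void main(String[] args) {
            // [a, a, b, c, c] -> [a, b, c]; an unsorted [a, b, a] would keep both a's
            System.out.println(distinctAdjacent(Arrays.asList("a", "a", "b", "c", "c")));
        }
    }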
- */ - -package mvm.rya.blueprints.sail - -import mvm.rya.api.utils.IteratorWrapper -import junit.framework.TestCase -import mvm.rya.blueprints.config.RyaGraphConfiguration -import org.openrdf.model.Statement -import static mvm.rya.accumulo.mr.utils.MRUtils.* -import static mvm.rya.api.RdfCloudTripleStoreConstants.VALUE_FACTORY - -/** - * Date: 5/10/12 - * Time: 8:55 AM - */ -class RyaSailVertexSequenceTest extends TestCase { - - public void testDistinctSubjects() { - def namespace = "urn:test#" - def vf = VALUE_FACTORY - def graph = RyaGraphConfiguration.createGraph( - [(AC_INSTANCE_PROP): "inst", - (AC_MOCK_PROP): "true", - (AC_USERNAME_PROP): "user", - (AC_PWD_PROP): "pwd", - ] - ); - - def a = vf.createURI(namespace, "a") - def b = vf.createURI(namespace, "b") - def c = vf.createURI(namespace, "c") - def statements = [ - vf.createStatement(a, vf.createURI(namespace, "p"), vf.createURI(namespace, "l1")), - vf.createStatement(a, vf.createURI(namespace, "p"), vf.createURI(namespace, "l2")), - vf.createStatement(a, vf.createURI(namespace, "p"), vf.createURI(namespace, "l3")), - vf.createStatement(b, vf.createURI(namespace, "p"), vf.createURI(namespace, "l1")), - vf.createStatement(c, vf.createURI(namespace, "p"), vf.createURI(namespace, "l1")), - vf.createStatement(c, vf.createURI(namespace, "p"), vf.createURI(namespace, "l2")), - vf.createStatement(c, vf.createURI(namespace, "p"), vf.createURI(namespace, "l3")), - ] - def edgeSeq = new RyaSailEdgeSequence(new IteratorWrapper(statements.iterator()), graph) - def vertexSeq = new RyaSailVertexSequence(edgeSeq) - def expectedList = [a, b, c] - def list = vertexSeq.toList().collect { v -> - v.getRawVertex() - } - assertEquals(expectedList, list) - } - - public void testDistinctObjects() { - def namespace = "urn:test#" - def vf = VALUE_FACTORY - def graph = RyaGraphConfiguration.createGraph( - [(AC_INSTANCE_PROP): "inst", - (AC_MOCK_PROP): "true", - (AC_USERNAME_PROP): "user", - (AC_PWD_PROP): "pwd", - ] - ); - def a = vf.createURI(namespace, "a") - def b = vf.createURI(namespace, "b") - def c = vf.createURI(namespace, "c") - def l1 = vf.createURI(namespace, "l1") - def l2 = vf.createURI(namespace, "l2") - def l3 = vf.createURI(namespace, "l3") - def statements = [ - vf.createStatement(a, vf.createURI(namespace, "p"), l1), - vf.createStatement(b, vf.createURI(namespace, "p"), l1), - vf.createStatement(c, vf.createURI(namespace, "p"), l1), - vf.createStatement(a, vf.createURI(namespace, "p"), l2), - vf.createStatement(c, vf.createURI(namespace, "p"), l2), - vf.createStatement(a, vf.createURI(namespace, "p"), l3), - vf.createStatement(c, vf.createURI(namespace, "p"), l3), - ] - def edgeSeq = new RyaSailEdgeSequence(new IteratorWrapper(statements.iterator()), graph) - def vertexSeq = new RyaSailVertexSequence(edgeSeq, RyaSailVertexSequence.VERTEXSIDE.OBJECT) - def expectedList = [l1, l2, l3] - def list = vertexSeq.toList().collect { v -> - v.getRawVertex() - } - assertEquals(expectedList, list) - } -} diff --git a/extras/tinkerpop.rya/src/test/resources/log4j.properties b/extras/tinkerpop.rya/src/test/resources/log4j.properties deleted file mode 100644 index 598d7b5f0..000000000 --- a/extras/tinkerpop.rya/src/test/resources/log4j.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - - - diff --git a/osgi/alx.rya.console/pom.xml b/osgi/alx.rya.console/pom.xml deleted file mode 100644 index 9f5020c9d..000000000 --- a/osgi/alx.rya.console/pom.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.osgi - 3.2.10-SNAPSHOT - - - alx.rya.console - Apache Rya ALX Console - - bundle - - - - org.apache.rya - rya.api - - - - org.openrdf.sesame - sesame-repository-api - - - org.apache.karaf.shell - org.apache.karaf.shell.console - provided - - - - - - - org.apache.felix - maven-bundle-plugin - - - - - diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java deleted file mode 100644 index 7fada6642..000000000 --- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java +++ /dev/null @@ -1,58 +0,0 @@ -package mvm.rya.alx.command; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.persist.RyaDAO; -import org.apache.karaf.shell.console.OsgiCommandSupport; -import org.openrdf.repository.Repository; -import org.osgi.util.tracker.ServiceTracker; - -public abstract class AbstractRyaCommand extends OsgiCommandSupport { - - protected Repository repository; - protected RyaDAO rdfDAO; - - @Override - protected Object doExecute() throws Exception { - ServiceTracker serviceTracker = new ServiceTracker(getBundleContext(), Repository.class.getName(), null); - serviceTracker.open(); - repository = (Repository) serviceTracker.getService(); - serviceTracker.close(); - if (repository == null) { - System.out.println("Sail Repository not available"); - return null; - } - - serviceTracker = new ServiceTracker(getBundleContext(), RyaDAO.class.getName(), null); - serviceTracker.open(); - rdfDAO = (RyaDAO) serviceTracker.getService(); - serviceTracker.close(); - if (rdfDAO == null) { - System.out.println("Rdf DAO not available"); - return null; - } - - return doRyaExecute(); - } - - protected abstract Object doRyaExecute() throws Exception; -} diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java deleted file mode 100644 index 658f3fc99..000000000 --- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java +++ /dev/null @@ -1,80 +0,0 @@ -package mvm.rya.alx.command; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
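AbstractRyaCommand above resolves its collaborators from the OSGi service registry with short-lived ServiceTrackers: open, grab whatever is currently registered, close. Reduced to an illustrative helper (the generic lookup method is not part of the deleted code):

    import org.osgi.framework.BundleContext;
    import org.osgi.util.tracker.ServiceTracker;

    public class ServiceLookupSketch {
        // Fetch the currently registered service of a given type, or null.
        static <T> T lookup(BundleContext ctx, Class<T> type) {
            ServiceTracker tracker = new ServiceTracker(ctx, type.getName(), null);
            tracker.open();
            T service = type.cast(tracker.getService());
            tracker.close();
            return service;
        }
    }

Closing the tracker immediately leaves the command holding an untracked reference, which is acceptable here only because each command is a one-shot console invocation.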
- */ - - - -import org.apache.felix.gogo.commands.Command; -import org.apache.felix.gogo.commands.Option; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryResult; - -import static mvm.rya.api.RdfCloudTripleStoreUtils.*; - -/** - * Date: 5/16/12 - * Time: 1:23 PM - */ -@Command(scope = "rya", name = "getstatements", description = "Print statements to screen based on triple pattern") -public class GetStatementsRyaCommand extends AbstractRyaCommand { - @Option(name = "-s", aliases = {"--subject"}, description = "Subject of triple pattern", required = false, multiValued = false) - private String subject; - @Option(name = "-p", aliases = {"--predicate"}, description = "Predicate of triple pattern", required = false, multiValued = false) - private String predicate; - @Option(name = "-o", aliases = {"--object"}, description = "Object of triple pattern", required = false, multiValued = false) - private String object; - @Option(name = "-c", aliases = {"--context"}, description = "Context of triple pattern", required = false, multiValued = false) - private String context; - - @Override - protected Object doRyaExecute() throws Exception { - if (subject == null && predicate == null && object == null && context == null) { - System.out.println("Please specify subject|predicate|object|context"); - return null; - } - - System.out.println(subject); - System.out.println(predicate); - System.out.println(object); - System.out.println(context); - RepositoryConnection connection = null; - try { - connection = repository.getConnection(); - RepositoryResult statements = connection.getStatements( - (subject != null) ? (Resource) createValue(subject) : null, - (predicate != null) ? (URI) createValue(predicate) : null, - (object != null) ? createValue(object) : null, - false, - (context != null) ? new Resource[]{(Resource) createValue(context)} : new Resource[0]); - while(statements.hasNext()) { - System.out.println(statements.next()); - } - statements.close(); - } finally { - if (connection != null) { - connection.close(); - } - } - return null; - } -} diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java deleted file mode 100644 index 19b002f93..000000000 --- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java +++ /dev/null @@ -1,46 +0,0 @@ -package mvm.rya.alx.command; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
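With the annotations above, Karaf exposes this as rya:getstatements; each unset option stays null and becomes a wildcard in the getStatements call, and the guard only insists that at least one pattern position is given. A typical console invocation, with an illustrative subject URI:

    karaf@root> rya:getstatements -s http://example/subj

The context, when supplied, is wrapped in a one-element Resource array, while an absent context passes a zero-length array and matches every named graph.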
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.apache.felix.gogo.commands.Command; - -import java.util.Map; - -/** - * Date: 5/16/12 - * Time: 11:04 AM - */ -@Command(scope = "rya", name = "info", description = "Displays information about the running Rya instance") -public class InfoRyaCommand extends AbstractRyaCommand { - - @Override - protected Object doRyaExecute() throws Exception { - System.out.println("******************RYA Configuration******************"); - RdfCloudTripleStoreConfiguration conf = rdfDAO.getConf(); - for (Map.Entry next : conf) { - System.out.println(next.getKey() + ":\t\t" + next.getValue()); - } - System.out.println("*****************************************************"); - return null; - } -} diff --git a/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml b/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml deleted file mode 100644 index 129e9c75a..000000000 --- a/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/osgi/alx.rya/pom.xml b/osgi/alx.rya/pom.xml deleted file mode 100644 index e2ca105af..000000000 --- a/osgi/alx.rya/pom.xml +++ /dev/null @@ -1,86 +0,0 @@ - - - - - 4.0.0 - - org.apache.rya - rya.osgi - 3.2.10-SNAPSHOT - - - alx.rya - Apache Rya ALX - - bundle - - - - org.apache.rya - accumulo.rya - - - junit - junit - test - - - - - - org.apache.felix - maven-bundle-plugin - ${maven-bundle-plugin.version} - true - - - *,net.sf.cglib.proxy - * - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.7 - - - attach-artifacts - package - - attach-artifact - - - - - - src/main/features/alx.rya-features.xml - - xml - features - - - - - - - - - diff --git a/osgi/alx.rya/src/main/features/alx.rya-features.xml b/osgi/alx.rya/src/main/features/alx.rya-features.xml deleted file mode 100644 index 9e36c33f1..000000000 --- a/osgi/alx.rya/src/main/features/alx.rya-features.xml +++ /dev/null @@ -1,104 +0,0 @@ - - - - - - - wrap:mvn:org.openrdf.sesame/sesame-model/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-runtime/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-query/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryalgebra-model/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryparser-api/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryparser-serql/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryparser-sparql/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryresultio-api/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryresultio-binary/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryresultio-sparqljson/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryresultio-text/2.6.4 - wrap:mvn:net.sf.opencsv/opencsv/2.0 - wrap:mvn:org.openrdf.sesame/sesame-repository-api/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-manager/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-event/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-sail/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-sail-memory/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-sail-inferencer/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-queryalgebra-evaluation/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-sparql/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-http/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-http-client/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-dataset/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-repository-contextaware/2.6.4 - 
wrap:mvn:org.openrdf.sesame/sesame-http-protocol/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-ntriples/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-api/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-binary/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-n3/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-trix/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-turtle/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-trig/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-sail-api/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-sail-nativerdf/2.6.4 - - wrap:mvn:org.openrdf.sesame/sesame-queryresultio-sparqlxml/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-util/2.6.4 - wrap:mvn:org.openrdf.sesame/sesame-rio-rdfxml/2.6.4 - - - wrap:mvn:com.tinkerpop.blueprints/blueprints-core/1.2 - mvn:org.codehaus.jettison/jettison/1.3 - wrap:mvn:stax/stax-api/1.0.1 - - mvn:org.codehaus.jackson/jackson-core-asl/1.8.5 - mvn:org.codehaus.jackson/jackson-mapper-asl/1.8.5 - - - - tinkerpop.blueprints - google.guava - mvn:mvm.rya/sesame-runtime-osgi/2.6.4 - wrap:mvn:mvm.rya/rya.api/3.0.4-SNAPSHOT - wrap:mvn:mvm.rya/rya.sail.impl/3.0.4-SNAPSHOT - - - mvm.alx.connect.cloudbase.connect - google.guava - wrap:mvn:mvm.rya/cloudbase.rya/3.0.4-SNAPSHOT - wrap:mvn:mvm.rya/cloudbase.utils/1.0.1-SNAPSHOT - - - mvm.alx.accumulo.connect - google.guava - wrap:mvn:mvm.rya/accumulo.rya/3.0.4-SNAPSHOT - - - pax-web - google.guava - rya.sail - accumulo.rya - - mvn:mvm.rya/alx.rya/3.0.4-SNAPSHOT - - - alx.rya - mvn:mvm.rya/alx.rya.console/3.0.4-SNAPSHOT - - diff --git a/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java b/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java deleted file mode 100644 index 849cf6022..000000000 --- a/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java +++ /dev/null @@ -1,53 +0,0 @@ -package mvm.rya.alx.util; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.apache.hadoop.conf.Configuration; - -import java.util.Map; - -/** - */ -public class ConfigurationFactory { - private Map properties; - - public RdfCloudTripleStoreConfiguration getConfiguration() { - RdfCloudTripleStoreConfiguration conf = new AccumuloRdfConfiguration(); - if (properties != null) { - for (Map.Entry prop : properties.entrySet()) { - conf.set(prop.getKey(), prop.getValue()); - } - conf.setTablePrefix(conf.getTablePrefix()); - } - return conf; - } - - public Map getProperties() { - return properties; - } - - public void setProperties(Map properties) { - this.properties = properties; - } -} diff --git a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml deleted file mode 100644 index 76f1bd36e..000000000 --- a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - l_ - true - - - - - - - - - - - - diff --git a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml deleted file mode 100644 index 4ef9ac0e8..000000000 --- a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml b/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml deleted file mode 100644 index cec91f6f6..000000000 --- a/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - diff --git a/osgi/camel.rya/pom.xml b/osgi/camel.rya/pom.xml deleted file mode 100644 index ade8e0345..000000000 --- a/osgi/camel.rya/pom.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.osgi - 3.2.10-SNAPSHOT - - - camel.rya - Apache Rya Camel - - bundle - - - - org.apache.rya - rya.sail - - - - org.apache.camel - camel-core - - - org.apache.camel - camel-test - - - - org.apache.rya - accumulo.rya - test - - - org.slf4j - slf4j-log4j12 - test - - - - - - org.apache.felix - maven-bundle-plugin - - - - - diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java deleted file mode 100644 index 0bbc07c78..000000000 --- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java +++ /dev/null @@ -1,59 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
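ConfigurationFactory above is the small Spring-facing bridge: a string map goes in, an AccumuloRdfConfiguration comes out with every entry copied across and the resolved table prefix written back into the configuration explicitly. A usage sketch matching the l_ prefix the Spring wiring below configures:

    import java.util.HashMap;
    import java.util.Map;

    import mvm.rya.alx.util.ConfigurationFactory;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;

    public class ConfFactorySketch {
        static RdfCloudTripleStoreConfiguration build() {
            Map<String, String> props = new HashMap<String, String>();
            props.put(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "l_");
            ConfigurationFactory factory = new ConfigurationFactory();
            factory.setProperties(props);
            return factory.getConfiguration();
        }
    }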
- */ - - - -import org.apache.camel.Endpoint; -import org.apache.camel.impl.DefaultComponent; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.repository.Repository; -import org.openrdf.repository.sail.SailRepository; - -import java.util.Map; - -import static com.google.common.base.Preconditions.*; -/** - * Save and retrieve triples - */ -public class CbSailComponent extends DefaultComponent { - public static final String SAILREPONAME = "sailRepoName"; - - public static final String ENDPOINT_URI = "cbsail"; - public static final String SPARQL_QUERY_PROP = "cbsail.sparql"; - public static final String START_TIME_QUERY_PROP = "cbsail.startTime"; - public static final String TTL_QUERY_PROP = "cbsail.ttl"; - public static final ValueFactory valueFactory = new ValueFactoryImpl(); - - @Override - protected Endpoint createEndpoint(String uri, String remaining, Map parameters) throws Exception { - String sailRepoNameParam = Repository.class.getName(); - if (parameters.containsKey(sailRepoNameParam)) { - sailRepoNameParam = getAndRemoveParameter(parameters, SAILREPONAME, String.class); - } - Repository sailRepository = getCamelContext().getRegistry().lookup(sailRepoNameParam, Repository.class); - checkNotNull(sailRepository, "Sail Repository must exist within the camel registry. Using lookup name[" + sailRepoNameParam + "]"); - - CbSailEndpoint sailEndpoint = new CbSailEndpoint(uri, this, sailRepository, remaining); - setProperties(sailEndpoint, parameters); - return sailEndpoint; - } -} diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java deleted file mode 100644 index 4a89291c2..000000000 --- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java +++ /dev/null @@ -1,119 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
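An easy-to-miss detail in CbSailComponent above: the Repository does not come from the endpoint path but from the Camel registry, looked up under the Repository class name by default, or under the bean name given in the sailRepoName parameter. A hedged route sketch; the endpoint path and the query string are illustrative:

    import org.apache.camel.builder.RouteBuilder;

    import static mvm.rya.camel.cbsail.CbSailComponent.SPARQL_QUERY_PROP;

    public class CbSailRouteSketch extends RouteBuilder {
        @Override
        public void configure() {
            // Once the SPARQL header is set, the producer runs the query
            // against the registry-resolved Repository and replaces the
            // message body with the result.
            from("direct:query")
                    .setHeader(SPARQL_QUERY_PROP, constant("SELECT * WHERE { ?s ?p ?o } LIMIT 10"))
                    .to("cbsail:rya");
        }
    }

Bodies sent without that header take the ingest path instead: a single Statement, or a List of them, is added to the repository and committed, as the producer code below shows.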
- */ - - - -import org.apache.camel.*; -import org.apache.camel.impl.DefaultEndpoint; -import org.openrdf.repository.Repository; - -import static com.google.common.base.Preconditions.*; - -/** - * setHeader(SPARQL, sparqlQuery).setHeader(TTL, ttl).to("cbsail:server?port=2181&user=user&pwd=pwd&instanceName=name").getBody() - */ -public class CbSailEndpoint extends DefaultEndpoint { - - - public enum CbSailOutput { - XML, BINARY - } - - private Long ttl; - private Repository sailRepository; - private String sparql; - private String tablePrefix; - private boolean infer = true; - private String queryOutput = CbSailOutput.BINARY.toString(); - - public CbSailEndpoint(String endpointUri, Component component, Repository sailRepository, String remaining) { - super(endpointUri, component); - this.sailRepository = sailRepository; - } - - protected void validate() { - checkNotNull(sailRepository); - } - - @Override - public Producer createProducer() throws Exception { - validate(); - return new CbSailProducer(this); - } - - @Override - public Consumer createConsumer(Processor processor) throws Exception { - throw new RuntimeCamelException((new StringBuilder()).append("Cannot consume from a CbSailEndpoint: ").append(getEndpointUri()).toString()); - } - - @Override - public boolean isSingleton() { - return true; - } - - public Long getTtl() { - return ttl; - } - - public void setTtl(Long ttl) { - this.ttl = ttl; - } - - public String getSparql() { - return sparql; - } - - public void setSparql(String sparql) { - this.sparql = sparql; - } - - public String getTablePrefix() { - return tablePrefix; - } - - public void setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - } - - public boolean isInfer() { - return infer; - } - - public void setInfer(boolean infer) { - this.infer = infer; - } - - public String getQueryOutput() { - return queryOutput; - } - - public void setQueryOutput(String queryOutput) { - this.queryOutput = queryOutput; - } - - public Repository getSailRepository() { - return sailRepository; - } - - public void setSailRepository(Repository sailRepository) { - this.sailRepository = sailRepository; - } -} diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java deleted file mode 100644 index ce3ff5532..000000000 --- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java +++ /dev/null @@ -1,175 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
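CbSailEndpoint is producer-only (createConsumer throws), and its URI options bind straight onto the bean setters above. The class comment's one-liner expands to a route like this sketch (the endpoint name and option values are illustrative, not taken from this patch):

import org.apache.camel.builder.RouteBuilder;

import mvm.rya.camel.cbsail.CbSailComponent;

public class CbSailRouteSketch extends RouteBuilder {
    @Override
    public void configure() {
        from("direct:query")
            // the SPARQL to run travels in the cbsail.sparql header
            .setHeader(CbSailComponent.SPARQL_QUERY_PROP,
                    constant("SELECT * WHERE { ?s ?p ?o } LIMIT 10"))
            // ttl, infer and queryOutput map onto the endpoint's setters
            .to("cbsail:myRepo?ttl=86400000&infer=true&queryOutput=XML");
    }
}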
- */ - - - -import org.apache.camel.Exchange; -import org.apache.camel.impl.DefaultProducer; -import org.openrdf.model.Statement; -import org.openrdf.query.*; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFHandlerException; - -import java.io.ByteArrayOutputStream; -import java.util.*; - -import static mvm.rya.api.RdfCloudTripleStoreConfiguration.*; -import static mvm.rya.camel.cbsail.CbSailComponent.SPARQL_QUERY_PROP; -import static mvm.rya.camel.cbsail.CbSailComponent.valueFactory; - -/** - */ -public class CbSailProducer extends DefaultProducer { - - private RepositoryConnection connection; - - private CbSailEndpoint.CbSailOutput queryOutput = CbSailEndpoint.CbSailOutput.BINARY; - - public CbSailProducer(CbSailEndpoint endpoint) { - super(endpoint); - } - - @Override - public void process(final Exchange exchange) throws Exception { - //If a query is set in the header or uri, use it - Collection queries = new ArrayList(); - Collection tmp = exchange.getIn().getHeader(SPARQL_QUERY_PROP, Collection.class); - if (tmp != null) { - queries = tmp; - } else { - String query = exchange.getIn().getHeader(SPARQL_QUERY_PROP, String.class); - if (query != null) { - queries.add(query); - } - } - - if (queries.size() > 0) - sparqlQuery(exchange, queries); - else - inputTriples(exchange); - } - - protected void inputTriples(Exchange exchange) throws RepositoryException { - Object body = exchange.getIn().getBody(); - if (body instanceof Statement) { - //save statement - inputStatement((Statement) body); - } else if (body instanceof List) { - //save list of statements - List lst = (List) body; - for (Object obj : lst) { - if (obj instanceof Statement) - inputStatement((Statement) obj); - } - } - connection.commit(); - exchange.getOut().setBody(Boolean.TRUE); - } - - protected void inputStatement(Statement stmt) throws RepositoryException { - connection.add(stmt.getSubject(), stmt.getPredicate(), stmt.getObject()); - } - - protected void sparqlQuery(Exchange exchange, Collection queries) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException, RDFHandlerException { - - List list = new ArrayList(); - for (String query : queries) { - -// Long startTime = exchange.getIn().getHeader(START_TIME_QUERY_PROP, Long.class); -// Long ttl = exchange.getIn().getHeader(TTL_QUERY_PROP, Long.class); - String auth = exchange.getIn().getHeader(CONF_QUERY_AUTH, String.class); - Boolean infer = exchange.getIn().getHeader(CONF_INFER, Boolean.class); - - Object output = performSelect(query, auth, infer); - if (queries.size() == 1) { - exchange.getOut().setBody(output); - return; - } else - list.add(output); - - } - exchange.getOut().setBody(list); - } - - protected Object performSelect(String query, String auth, Boolean infer) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException { - TupleQuery tupleQuery = connection.prepareTupleQuery( - QueryLanguage.SPARQL, query); - if (auth != null && auth.length() > 0) - tupleQuery.setBinding(CONF_QUERY_AUTH, valueFactory.createLiteral(auth)); - if (infer != null) - tupleQuery.setBinding(CONF_INFER, valueFactory.createLiteral(infer)); - if (CbSailEndpoint.CbSailOutput.BINARY.equals(queryOutput)) { - final List listOutput = new ArrayList(); - TupleQueryResultHandlerBase handler = new TupleQueryResultHandlerBase() { - @Override 
- public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - Map map = new HashMap(); - for (String s : bindingSet.getBindingNames()) { - map.put(s, bindingSet.getBinding(s).getValue().stringValue()); - } - listOutput.add(map); - } - }; - tupleQuery.evaluate(handler); - return listOutput; - } else if (CbSailEndpoint.CbSailOutput.XML.equals(queryOutput)) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(baos); - tupleQuery.evaluate(sparqlWriter); - return new String(baos.toByteArray()); - } else { - throw new IllegalArgumentException("Query Output[" + queryOutput + "] is not recognized"); - } - } - -// protected Object performConstruct(String query, Long ttl, Long startTime) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException, RDFHandlerException { -// GraphQuery tupleQuery = connection.prepareGraphQuery( -// QueryLanguage.SPARQL, query); -// if (ttl != null && ttl > 0) -// tupleQuery.setBinding("ttl", valueFactory.createLiteral(ttl)); -// if (startTime != null && startTime > 0) -// tupleQuery.setBinding("startTime", valueFactory.createLiteral(startTime)); -// if (CbSailEndpoint.CbSailOutput.BINARY.equals(queryOutput)) { -// throw new IllegalArgumentException("In Graph Construct mode, cannot return Java object"); -// } else if (CbSailEndpoint.CbSailOutput.XML.equals(queryOutput)) { -// ByteArrayOutputStream baos = new ByteArrayOutputStream(); -// RDFXMLWriter rdfWriter = new RDFXMLWriter(baos); -// tupleQuery.evaluate(rdfWriter); -// return new String(baos.toByteArray()); -// } else { -// throw new IllegalArgumentException("Query Output[" + queryOutput + "] is not recognized"); -// } -// } - - - @Override - protected void doStart() throws Exception { - CbSailEndpoint cbSailEndpoint = (CbSailEndpoint) getEndpoint(); - connection = cbSailEndpoint.getSailRepository().getConnection(); - } - - @Override - protected void doStop() throws Exception { - connection.close(); - } -} diff --git a/osgi/camel.rya/src/main/resources/META-INF/services/org/apache/camel/component/cbsail b/osgi/camel.rya/src/main/resources/META-INF/services/org/apache/camel/component/cbsail deleted file mode 100644 index 69cfb2d3f..000000000 --- a/osgi/camel.rya/src/main/resources/META-INF/services/org/apache/camel/component/cbsail +++ /dev/null @@ -1 +0,0 @@ -class=mvm.rya.camel.cbsail.CbSailComponent \ No newline at end of file diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java deleted file mode 100644 index d4f53da66..000000000 --- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java +++ /dev/null @@ -1,117 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
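In BINARY mode CbSailProducer's performSelect returns each solution as a Map from binding name to string value, so a single query hands the caller a List of such maps; XML mode returns one SPARQL Results XML string instead. A hypothetical caller, assuming a route like direct:query feeding a cbsail endpoint with the default BINARY output:

import java.util.List;
import java.util.Map;

import org.apache.camel.ProducerTemplate;

import mvm.rya.camel.cbsail.CbSailComponent;

public class CbSailQuerySketch {
    @SuppressWarnings("unchecked")
    public static void dump(ProducerTemplate template, String sparql) {
        // null body: the producer only inspects the query header here
        Object out = template.requestBodyAndHeader("direct:query", null,
                CbSailComponent.SPARQL_QUERY_PROP, sparql);
        for (Map<String, String> row : (List<Map<String, String>>) out) {
            System.out.println(row);
        }
    }
}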
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.camel.cbsail.CbSailComponent; -import org.apache.camel.EndpointInject; -import org.apache.camel.Exchange; -import org.apache.camel.Processor; -import org.apache.camel.ProducerTemplate; -import org.apache.camel.builder.RouteBuilder; -import org.apache.camel.test.CamelTestSupport; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -import java.util.HashMap; - -public class CbSailIntegrationTest extends CamelTestSupport { - - @EndpointInject(uri = "cbsail:tquery?server=stratus13&port=2181&user=root&pwd=password&instanceName=stratus") - ProducerTemplate producer; - - public void testCbSail() throws Exception { - String underGradInfo = "PREFIX rdf: " + - " PREFIX ub: " + - " SELECT * WHERE" + - " {" + - " ?pred ?obj ." + - " }"; - HashMap map = new HashMap(); - map.put(CbSailComponent.SPARQL_QUERY_PROP, underGradInfo); - map.put(CbSailComponent.START_TIME_QUERY_PROP, 0l); - map.put(CbSailComponent.TTL_QUERY_PROP, 86400000l); - Object o = producer.requestBodyAndHeaders(null, map); - System.out.println(o); - Thread.sleep(100000); - } - - @Override - protected RouteBuilder createRouteBuilder() { - return new RouteBuilder() { - - @Override - public void configure() throws Exception { - ValueFactory vf = new ValueFactoryImpl(); - String underGradInfo = "PREFIX rdf: " + - " PREFIX ub: " + - " SELECT * WHERE" + - " {" + - " ?pred ?obj ." + - " }"; - String rawEvents = "PREFIX nh: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?uuid nh:timestamp ?timestamp.\n" + - " ?uuid nh:site ?site;\n" + - " nh:system ?system;\n" + - " nh:dataSupplier ?dataSupplier;\n" + - " nh:dataType ?dataType;\n" + - " ?data.\n" + - " } LIMIT 100"; - String latestModels = "PREFIX nh: " + - " PREFIX xsd: " + - " SELECT * WHERE" + - " {" + - " ?modelUuid nh:dayOfWeek \"5\";" + - " nh:hourOfDay \"3\";" + - " nh:timestamp ?timestamp;" + -// " FILTER (xsd:integer(?timestamp) > 1297652964633)." + - " nh:dataProperty \"count\";" + - " nh:modelType \"mvm.learning.tpami.SimpleGaussianMMModel\";" + - " nh:site ?site;" + - " nh:dataSupplier ?dataSupplier;" + - " nh:system ?system;" + - " nh:dataType ?dataType;" + - " nh:model ?model;" + - " nh:key ?key." + - " }"; - - from("timer://foo?fixedRate=true&period=60000"). - setHeader(CbSailComponent.SPARQL_QUERY_PROP, constant(underGradInfo)). -// setBody(constant(new StatementImpl(vf.createURI("http://www.Department0.University0.edu/UndergraduateStudent610"), vf.createURI("urn:test:onto:univ#testPred"), vf.createLiteral("test")))). 
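// (The timer route above fires every 60 seconds and stamps the SPARQL
// query into the cbsail.sparql header; the endpoint below executes it,
// and queryOutput=XML returns SPARQL Results XML instead of the default
// List of binding maps.)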
- to("cbsail:tquery?server=stratus13&port=2181&user=root&pwd=password&instanceName=stratus&queryOutput=XML" + -// "&ttl=259200000" -// + "&sparql=" + latestModels" + - "").process(new Processor() { - - @Override - public void process(Exchange exchange) throws Exception { - System.out.println(exchange.getIn().getBody()); -// if (body != null) -// System.out.println(body.size()); - } - }).end(); - } - }; - } - -} diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java deleted file mode 100644 index ddb056ed3..000000000 --- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java +++ /dev/null @@ -1,45 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.camel.cbsail.CbSailComponent; -import org.apache.camel.EndpointInject; -import org.apache.camel.ProducerTemplate; - -/** - * Class CbSailPojoMain - * Date: May 3, 2011 - * Time: 11:20:23 PM - */ -public class CbSailPojoMain { - - @EndpointInject(uri = "cbsail:tquery?server=stratus13&port=2181&user=root&pwd=password&instanceName=stratus") - ProducerTemplate producer; - - public void executeQuery(String sparql) { - Object o = producer.requestBodyAndHeader(null, CbSailComponent.SPARQL_QUERY_PROP, sparql); - System.out.println(o); - } - - public static void main(String[] args) { - } -} diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java deleted file mode 100644 index c52d094e0..000000000 --- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java +++ /dev/null @@ -1,205 +0,0 @@ -package mvm.rya.camel.cbsail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import mvm.rya.rdftriplestore.RyaSailRepository; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.namespace.NamespaceManager; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.camel.EndpointInject; -import org.apache.camel.Produce; -import org.apache.camel.ProducerTemplate; -import org.apache.camel.builder.RouteBuilder; -import org.apache.camel.component.mock.MockEndpoint; -import org.apache.camel.impl.JndiRegistry; -import org.apache.camel.test.CamelTestSupport; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - */ -public class CbSailTest extends CamelTestSupport { - - static String litdupsNS = "urn:test:litdups#"; - - private RdfCloudTripleStore store; - private Repository repository; - private ValueFactory vf = RdfCloudTripleStoreConstants.VALUE_FACTORY; - - @EndpointInject(uri = "mock:results") - protected MockEndpoint resultEndpoint; - - @Produce(uri = "direct:query") - protected ProducerTemplate template; - - @Override - public void setUp() throws Exception { - super.setUp(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - repository.shutDown(); - } - - @Override - protected JndiRegistry createRegistry() throws Exception { - store = new MockRdfCloudStore(); -// store.setDisplayQueryPlan(true); -// store.setInferencing(false); - NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf()); - store.setNamespaceManager(nm); - repository = new RyaSailRepository(store); - repository.initialize(); - - JndiRegistry registry = super.createRegistry(); - registry.bind(Repository.class.getName(), repository); - return registry; - } - - @Override - protected RouteBuilder createRouteBuilder() { - return new RouteBuilder() { - - @Override - public void configure() throws Exception { - from("direct:query"). - to("cbsail:queryEndpoint"). - to("mock:results"); - } - }; - } - - public void testSimpleQuery() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI cpu = vf.createURI(litdupsNS, "cpu"); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - conn.add(cpu, loadPerc, uri1); - conn.commit(); - conn.close(); - - resultEndpoint.expectedMessageCount(1); - - //query through camel - String query = "select * where {" + - "<" + cpu.toString() + "> ?p ?o1." 
+ - "}"; - template.sendBodyAndHeader(null, CbSailComponent.SPARQL_QUERY_PROP, query); - - assertMockEndpointsSatisfied(); - } - - public void testSimpleQueryAuth() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI cpu = vf.createURI(litdupsNS, "cpu"); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - URI uri2 = vf.createURI(litdupsNS, "uri2"); - URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "auth1"); - conn.add(cpu, loadPerc, uri1, auth1); - conn.add(cpu, loadPerc, uri2); - conn.commit(); - conn.close(); - - resultEndpoint.expectedMessageCount(1); - - //query through camel - String query = "select * where {" + - "<" + cpu.toString() + "> ?p ?o1." + - "}"; - template.sendBodyAndHeader(null, CbSailComponent.SPARQL_QUERY_PROP, query); - - assertMockEndpointsSatisfied(); - - resultEndpoint.expectedMessageCount(2); - - query = "select * where {" + - "<" + cpu.toString() + "> ?p ?o1." + - "}"; - Map headers = new HashMap(); - headers.put(CbSailComponent.SPARQL_QUERY_PROP, query); - headers.put(RdfCloudTripleStoreConfiguration.BINDING_AUTH, "auth1"); - template.sendBodyAndHeaders(null, headers); - - assertMockEndpointsSatisfied(); - } - - public void testInsertData() throws Exception { - URI cpu = vf.createURI(litdupsNS, "cpu"); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - URI uri2 = vf.createURI(litdupsNS, "uri2"); - List insert = new ArrayList(); - insert.add(new StatementImpl(cpu, loadPerc, uri1)); - insert.add(new StatementImpl(cpu, loadPerc, uri2)); - - resultEndpoint.expectedBodiesReceived(true); - template.sendBody(insert); - assertMockEndpointsSatisfied(); - - resultEndpoint.expectedMessageCount(2); - String query = "select * where {" + - "<" + cpu.toString() + "> ?p ?o1." + - "}"; - template.sendBodyAndHeader(null, CbSailComponent.SPARQL_QUERY_PROP, query); - assertMockEndpointsSatisfied(); - } - - public class MockRdfCloudStore extends RdfCloudTripleStore { - - public MockRdfCloudStore() { - super(); - Instance instance = new MockInstance(); - try { - Connector connector = instance.getConnector("", ""); - setConf(new AccumuloRdfConfiguration()); - AccumuloRyaDAO cdao = new AccumuloRyaDAO(); - cdao.setConnector(connector); - setRyaDAO(cdao); - inferenceEngine = new InferenceEngine(); - inferenceEngine.setRyaDAO(cdao); - inferenceEngine.setRefreshGraphSchedule(1000); //every sec - setInferenceEngine(inferenceEngine); - } catch (Exception e) { - e.printStackTrace(); - } - } - } -} diff --git a/osgi/pom.xml b/osgi/pom.xml deleted file mode 100644 index e0a89929a..000000000 --- a/osgi/pom.xml +++ /dev/null @@ -1,91 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya-project - 3.2.10-SNAPSHOT - - - rya.osgi - Apache Rya OSGI Bundle - - pom - - - 2.1.0 - - - - alx.rya - alx.rya.console - camel.rya - - - - - - - - org.apache.rat - apache-rat-plugin - - - sesame-runtime-osgi/openrdf-sesame-osgi.bnd - - - - - - - - org.apache.felix - maven-bundle-plugin - ${maven-bundle-plugin.version} - true - - META-INF - - ${project.groupId}.${project.artifactId} - - ${project.version} - * - <_exportcontents>* - . 
- - - - - - genManifest - process-classes - - manifest - - - - - - - - diff --git a/osgi/sesame-runtime-osgi/openrdf-sesame-osgi.bnd b/osgi/sesame-runtime-osgi/openrdf-sesame-osgi.bnd deleted file mode 100644 index c0aea0764..000000000 --- a/osgi/sesame-runtime-osgi/openrdf-sesame-osgi.bnd +++ /dev/null @@ -1,7 +0,0 @@ --classpath= target/sesame-runtime-osgi.jar --output= target/sesame-runtime-osgi-2.6.4.jar -Import-Package= *;resolution:=optional -Export-Package= * -Bundle-Version= 2.6.4 -Bundle-SymbolicName= sesame-runtime-osgi -DynamicImport-Package= * diff --git a/osgi/sesame-runtime-osgi/pom.xml b/osgi/sesame-runtime-osgi/pom.xml deleted file mode 100644 index c454a664c..000000000 --- a/osgi/sesame-runtime-osgi/pom.xml +++ /dev/null @@ -1,139 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.osgi - 3.2.10-SNAPSHOT - - - sesame-runtime-osgi - Sesame Runtime for OSGI - - pom - - - - org.openrdf.sesame - sesame-runtime-osgi - - - biz.aQute - bnd - 0.0.397 - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - copy - generate-resources - - copy - - - - - org.openrdf.sesame - sesame-runtime-osgi - ${project.version} - ${project.build.directory} - sesame-runtime-osgi.jar - - - biz.aQute - bnd - 0.0.397 - ${project.build.directory} - bnd.jar - - - - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.1 - - - process-resources - - exec - - - - - java - - -jar - target/bnd.jar - build - openrdf-sesame-osgi.bnd - - - - - org.codehaus.mojo - build-helper-maven-plugin - - 1.7 - - - attach-artifacts - package - - attach-artifact - - - - - ${project.build.directory}/sesame-runtime-osgi-${project.version}.jar - - - - - - - - - - - - true - - - true - - bndrepo - aQute BND Repo - http://www.aqute.biz/repo - - - diff --git a/pig/accumulo.pig/pom.xml b/pig/accumulo.pig/pom.xml deleted file mode 100644 index 1a1e5f993..000000000 --- a/pig/accumulo.pig/pom.xml +++ /dev/null @@ -1,84 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya.pig - 3.2.10-SNAPSHOT - - - accumulo.pig - Apache Rya Accumulo Pig - - - - org.apache.rya - rya.sail - - - org.apache.rya - accumulo.rya - - - - org.openrdf.sesame - sesame-queryparser-sparql - - - - org.apache.pig - pig - provided - - - org.antlr - antlr-runtime - provided - - - - junit - junit - test - - - - - - org.apache.rat - apache-rat-plugin - - - src/test/resources/ResultsFile1.txt - src/test/resources/testQuery.txt - src/test/resources/testQuery2.txt - - - - - org.apache.maven.plugins - maven-shade-plugin - - - - diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java deleted file mode 100644 index 054146d8e..000000000 --- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java +++ /dev/null @@ -1,383 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
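The AccumuloStorage class that follows is a Pig LoadFunc/StoreFunc addressed by the accumulo:// location URI documented inside it. A sketch of driving it from Java through PigServer (connection values, ranges, and output path are illustrative):

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;

public class AccumuloStoragePigSketch {
    public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.MAPREDUCE);
        // LOAD yields (key, colfam, colqual, colvis, timestamp, value)
        // tuples; all fields are DataByteArray except the long timestamp
        pig.registerQuery("raw = LOAD 'accumulo://table1?instance=inst&user=root"
                + "&password=secret&zookeepers=localhost:2181&range=a|z'"
                + " USING mvm.rya.accumulo.pig.AccumuloStorage();");
        pig.registerQuery("vals = FOREACH raw GENERATE $0, $5;");
        pig.store("vals", "/tmp/accumulo-dump");
    }
}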
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat; -import org.apache.accumulo.core.client.mapreduce.lib.util.ConfiguratorBase; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.accumulo.core.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.WritableComparable; -import org.apache.hadoop.mapreduce.InputFormat; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.OutputFormat; -import org.apache.hadoop.mapreduce.RecordReader; -import org.apache.hadoop.mapreduce.RecordWriter; -import org.apache.pig.LoadFunc; -import org.apache.pig.OrderedLoadFunc; -import org.apache.pig.ResourceSchema; -import org.apache.pig.StoreFuncInterface; -import org.apache.pig.backend.executionengine.ExecException; -import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; -import org.apache.pig.data.DataByteArray; -import org.apache.pig.data.Tuple; -import org.apache.pig.data.TupleFactory; - -/** - * A LoadStoreFunc for retrieving data from and storing data to Accumulo - *

- * A Key/Val pair will be returned as tuples: (key, colfam, colqual, colvis, timestamp, value). All fields except timestamp are DataByteArray; timestamp is a long. - *

- * Tuples can be written in 2 forms: - * (key, colfam, colqual, colvis, value) - * OR - * (key, colfam, colqual, value) - */ -public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, OrderedLoadFunc { - private static final Log logger = LogFactory.getLog(AccumuloStorage.class); - - protected Configuration conf; - protected RecordReader reader; - protected RecordWriter writer; - - protected String inst; - protected String zookeepers; - protected String user = ""; - protected String password = ""; - protected String table; - protected Text tableName; - protected String auths; - protected Authorizations authorizations = Constants.NO_AUTHS; - protected List> columnFamilyColumnQualifierPairs = new LinkedList>(); - - protected Collection ranges = new ArrayList(); - protected boolean mock = false; - - public AccumuloStorage() { - } - - @Override - public Tuple getNext() throws IOException { - try { - // load the next pair - if (!reader.nextKeyValue()) { - logger.info("Reached end of results"); - return null; - } - - Key key = (Key) reader.getCurrentKey(); - Value value = (Value) reader.getCurrentValue(); - assert key != null && value != null; - - if (logger.isTraceEnabled()) { - logger.trace("Found key[" + key + "] and value[" + value + "]"); - } - - // and wrap it in a tuple - Tuple tuple = TupleFactory.getInstance().newTuple(6); - tuple.set(0, new DataByteArray(key.getRow().getBytes())); - tuple.set(1, new DataByteArray(key.getColumnFamily().getBytes())); - tuple.set(2, new DataByteArray(key.getColumnQualifier().getBytes())); - tuple.set(3, new DataByteArray(key.getColumnVisibility().getBytes())); - tuple.set(4, key.getTimestamp()); - tuple.set(5, new DataByteArray(value.get())); - if (logger.isTraceEnabled()) { - logger.trace("Output tuple[" + tuple + "]"); - } - return tuple; - } catch (InterruptedException e) { - throw new IOException(e.getMessage()); - } - } - - @Override - public InputFormat getInputFormat() { - return new AccumuloInputFormat(); - } - - @Override - public void prepareToRead(RecordReader reader, PigSplit split) { - this.reader = reader; - } - - @Override - public void setLocation(String location, Job job) throws IOException { - if (logger.isDebugEnabled()) { - logger.debug("Set Location[" + location + "] for job[" + job.getJobName() + "]"); - } - conf = job.getConfiguration(); - setLocationFromUri(location, job); - - if (!ConfiguratorBase.isConnectorInfoSet(AccumuloInputFormat.class, conf)) { - try { - AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes())); - } catch (AccumuloSecurityException e) { - throw new RuntimeException(e); - } - AccumuloInputFormat.setInputTableName(job, table); - AccumuloInputFormat.setScanAuthorizations(job, authorizations); - if (!mock) { - AccumuloInputFormat.setZooKeeperInstance(job, inst, zookeepers); - } else { - AccumuloInputFormat.setMockInstance(job, inst); - } - } - if (columnFamilyColumnQualifierPairs.size() > 0) - AccumuloInputFormat.fetchColumns(job, columnFamilyColumnQualifierPairs); - logger.info("Set ranges[" + ranges + "] for job[" + job.getJobName() + "] on table[" + table + "] " + - "for columns[" + columnFamilyColumnQualifierPairs + "] with authorizations[" + authorizations + "]"); - - if (ranges.size() == 0) { - throw new IOException("Accumulo Range must be specified"); - } - AccumuloInputFormat.setRanges(job, ranges); - } - - protected void setLocationFromUri(String uri, Job job) throws IOException { - // ex: 
accumulo://table1?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&columns=col1|cq1,col2|cq2&range=a|z&range=1|9&mock=true - try { - if (!uri.startsWith("accumulo://")) - throw new Exception("Bad scheme."); - String[] urlParts = uri.split("\\?"); - setLocationFromUriParts(urlParts); - - } catch (Exception e) { - throw new IOException("Expected 'accumulo://[?instance=&user=&password=&zookeepers=&auths=&[range=startRow|endRow[...],columns=[cf1|cq1,cf2|cq2,...]],mock=true(false)]': " + e.getMessage(), e); - } - } - - protected void setLocationFromUriParts(String[] urlParts) { - String columns = ""; - if (urlParts.length > 1) { - for (String param : urlParts[1].split("&")) { - String[] pair = param.split("="); - if (pair[0].equals("instance")) { - inst = pair[1]; - } else if (pair[0].equals("user")) { - user = pair[1]; - } else if (pair[0].equals("password")) { - password = pair[1]; - } else if (pair[0].equals("zookeepers")) { - zookeepers = pair[1]; - } else if (pair[0].equals("auths")) { - auths = pair[1]; - } else if (pair[0].equals("columns")) { - columns = pair[1]; - } else if (pair[0].equals("range")) { - String[] r = pair[1].split("\\|"); - if (r.length == 2) { - addRange(new Range(r[0], r[1])); - } else { - addRange(new Range(r[0])); - } - } else if (pair[0].equals("mock")) { - this.mock = Boolean.parseBoolean(pair[1]); - } - addLocationFromUriPart(pair); - } - } - String[] parts = urlParts[0].split("/+"); - table = parts[1]; - tableName = new Text(table); - - if (auths == null || auths.equals("")) { - authorizations = new Authorizations(); - } else { - authorizations = new Authorizations(auths.split(",")); - } - - if (!columns.equals("")) { - for (String cfCq : columns.split(",")) { - if (cfCq.contains("|")) { - String[] c = cfCq.split("\\|"); - String cf = c[0]; - String cq = c[1]; - addColumnPair(cf, cq); - } else { - addColumnPair(cfCq, null); - } - } - } - } - - protected void addColumnPair(String cf, String cq) { - columnFamilyColumnQualifierPairs.add(new Pair((cf != null) ? new Text(cf) : null, (cq != null) ? 
new Text(cq) : null)); - } - - protected void addLocationFromUriPart(String[] pair) { - - } - - protected void addRange(Range range) { - ranges.add(range); - } - - @Override - public String relativeToAbsolutePath(String location, Path curDir) throws IOException { - return location; - } - - @Override - public void setUDFContextSignature(String signature) { - - } - - /* StoreFunc methods */ - public void setStoreFuncUDFContextSignature(String signature) { - - } - - public String relToAbsPathForStoreLocation(String location, Path curDir) throws IOException { - return relativeToAbsolutePath(location, curDir); - } - - public void setStoreLocation(String location, Job job) throws IOException { - conf = job.getConfiguration(); - setLocationFromUri(location, job); - - if (!conf.getBoolean(AccumuloOutputFormat.class.getSimpleName() + ".configured", false)) { - try { - AccumuloOutputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes())); - } catch (AccumuloSecurityException e) { - throw new RuntimeException(e); - } - AccumuloOutputFormat.setDefaultTableName(job, table); - AccumuloOutputFormat.setZooKeeperInstance(job, inst, zookeepers); - BatchWriterConfig config = new BatchWriterConfig(); - config.setMaxLatency(10, TimeUnit.SECONDS); - config.setMaxMemory(10 * 1000 * 1000); - config.setMaxWriteThreads(10); - AccumuloOutputFormat.setBatchWriterOptions(job, config); - } - } - - public OutputFormat getOutputFormat() { - return new AccumuloOutputFormat(); - } - - public void checkSchema(ResourceSchema schema) throws IOException { - // we don't care about types, they all get casted to ByteBuffers - } - - public void prepareToWrite(RecordWriter writer) { - this.writer = writer; - } - - public void putNext(Tuple t) throws ExecException, IOException { - Mutation mut = new Mutation(objToText(t.get(0))); - Text cf = objToText(t.get(1)); - Text cq = objToText(t.get(2)); - - if (t.size() > 4) { - Text cv = objToText(t.get(3)); - Value val = new Value(objToBytes(t.get(4))); - if (cv.getLength() == 0) { - mut.put(cf, cq, val); - } else { - mut.put(cf, cq, new ColumnVisibility(cv), val); - } - } else { - Value val = new Value(objToBytes(t.get(3))); - mut.put(cf, cq, val); - } - - try { - writer.write(tableName, mut); - } catch (InterruptedException e) { - throw new IOException(e); - } - } - - private static Text objToText(Object o) { - return new Text(objToBytes(o)); - } - - private static byte[] objToBytes(Object o) { - if (o instanceof String) { - String str = (String) o; - return str.getBytes(); - } else if (o instanceof Long) { - Long l = (Long) o; - return l.toString().getBytes(); - } else if (o instanceof Integer) { - Integer l = (Integer) o; - return l.toString().getBytes(); - } else if (o instanceof Boolean) { - Boolean l = (Boolean) o; - return l.toString().getBytes(); - } else if (o instanceof Float) { - Float l = (Float) o; - return l.toString().getBytes(); - } else if (o instanceof Double) { - Double l = (Double) o; - return l.toString().getBytes(); - } - - // TODO: handle DataBag, Map, and Tuple - - return ((DataByteArray) o).get(); - } - - public void cleanupOnFailure(String failure, Job job) { - } - - @Override - public WritableComparable getSplitComparable(InputSplit inputSplit) throws IOException { - //cannot get access to the range directly - AccumuloInputFormat.RangeInputSplit rangeInputSplit = (AccumuloInputFormat.RangeInputSplit) inputSplit; - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - DataOutputStream out = new DataOutputStream(baos); -
rangeInputSplit.write(out); - out.close(); - DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray())); - Range range = new Range(); - range.readFields(stream); - stream.close(); - return range; - } -} diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java deleted file mode 100644 index 392c10851..000000000 --- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java +++ /dev/null @@ -1,348 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.File; -import java.io.IOException; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.regex.Pattern; - -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.AuthenticationToken; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.log4j.Logger; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.ProjectionElem; -import org.openrdf.query.algebra.ProjectionElemList; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; - -public class IndexWritingTool extends Configured implements Tool { - - private static final String sparql_key = "SPARQL.VALUE"; - private static String cardCounter = "count"; - - - public static void main(String[] args) throws 
Exception { - - ToolRunner.run(new Configuration(), new IndexWritingTool(), args); - - } - - @Override - public int run(final String[] args) throws Exception { - Preconditions.checkArgument(args.length == 7, "java " + IndexWritingTool.class.getCanonicalName() - + " hdfsSaveLocation sparqlFile cbinstance cbzk cbuser cbpassword rdfTablePrefix."); - - final String inputDir = args[0]; - final String sparqlFile = args[1]; - final String instStr = args[2]; - final String zooStr = args[3]; - final String userStr = args[4]; - final String passStr = args[5]; - final String tablePrefix = args[6]; - - String sparql = FileUtils.readFileToString(new File(sparqlFile)); - - Job job = new Job(getConf(), "Write HDFS Index to Accumulo"); - job.setJarByClass(this.getClass()); - - Configuration jobConf = job.getConfiguration(); - jobConf.setBoolean("mapred.map.tasks.speculative.execution", false); - setVarOrders(sparql, jobConf); - - TextInputFormat.setInputPaths(job, inputDir); - job.setInputFormatClass(TextInputFormat.class); - - job.setMapperClass(MyMapper.class); - job.setMapOutputKeyClass(Text.class); - job.setMapOutputValueClass(Mutation.class); - - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(Mutation.class); - - job.setNumReduceTasks(0); - - String tableName; - if (zooStr.equals("mock")) { - tableName = tablePrefix; - } else { - tableName = tablePrefix + "INDEX_" + UUID.randomUUID().toString().replace("-", "").toUpperCase(); - } - setAccumuloOutput(instStr, zooStr, userStr, passStr, job, tableName); - - jobConf.set(sparql_key, sparql); - - int complete = job.waitForCompletion(true) ? 0 : -1; - - if (complete == 0) { - - String[] varOrders = jobConf.getStrings("varOrders"); - String orders = Joiner.on("\u0000").join(varOrders); - Instance inst; - - if (zooStr.equals("mock")) { - inst = new MockInstance(instStr); - } else { - inst = new ZooKeeperInstance(instStr, zooStr); - } - - Connector conn = inst.getConnector(userStr, passStr.getBytes()); - BatchWriter bw = conn.createBatchWriter(tableName, 10, 5000, 1); - - Counters counters = job.getCounters(); - Counter c1 = counters.findCounter(cardCounter, cardCounter); - - Mutation m = new Mutation("~SPARQL"); - Value v = new Value(sparql.getBytes()); - m.put(new Text("" + c1.getValue()), new Text(orders), v); - bw.addMutation(m); - - bw.close(); - - return complete; - } else { - return complete; - } - - - } - - - public void setVarOrders(String s, Configuration conf) throws MalformedQueryException { - - SPARQLParser parser = new SPARQLParser(); - TupleExpr query = parser.parseQuery(s, null).getTupleExpr(); - - List projList = Lists.newArrayList(((Projection) query).getProjectionElemList().getTargetNames()); - String projElems = Joiner.on(";").join(projList); - conf.set("projElems", projElems); - - Pattern splitPattern1 = Pattern.compile("\n"); - Pattern splitPattern2 = Pattern.compile(","); - String[] lines = splitPattern1.split(s); - - List varOrders = Lists.newArrayList(); - List varOrderPos = Lists.newArrayList(); - - int orderNum = 0; - int projSizeSq = projList.size()*projList.size(); - - for (String t : lines) { - - - if(orderNum > projSizeSq){ - break; - } - - String[] order = null; - if (t.startsWith("#prefix")) { - t = t.substring(7).trim(); - order = splitPattern2.split(t, projList.size()); - } - - - String tempVarOrder = ""; - String tempVarOrderPos = ""; - - if (order != null) { - for (String u : order) { - if (tempVarOrder.length() == 0) { - tempVarOrder = u.trim(); - } else { - tempVarOrder = tempVarOrder + ";" + u.trim(); - } - 
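// (Each "#prefix v1,v2,..." line of the SPARQL file declares one variable
// ordering: varOrders records the variable names joined by ';', while
// varOrderPos records each variable's index within the projection list,
// which MyMapper later uses to permute the result columns.)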
int pos = projList.indexOf(u.trim()); - if (pos < 0) { - throw new IllegalArgumentException("Invalid variable order!"); - } else { - if (tempVarOrderPos.length() == 0) { - tempVarOrderPos = tempVarOrderPos + pos; - } else { - tempVarOrderPos = tempVarOrderPos + ";" + pos; - } - } - } - - varOrders.add(tempVarOrder); - varOrderPos.add(tempVarOrderPos); - } - - if(tempVarOrder.length() > 0) { - orderNum++; - } - - } - - if(orderNum == 0) { - varOrders.add(projElems); - String tempVarPos = ""; - - for(int i = 0; i < projList.size(); i++) { - if(i == 0) { - tempVarPos = Integer.toString(0); - } else { - tempVarPos = tempVarPos + ";" + i; - } - } - varOrderPos.add(tempVarPos); - - } - - String[] vOrders = varOrders.toArray(new String[varOrders.size()]); - String[] vOrderPos = varOrderPos.toArray(new String[varOrderPos.size()]); - - - - conf.setStrings("varOrders", vOrders); - conf.setStrings("varOrderPos", vOrderPos); - - } - - - private static void setAccumuloOutput(String instStr, String zooStr, String userStr, String passStr, Job job, String tableName) - throws AccumuloSecurityException { - - AuthenticationToken token = new PasswordToken(passStr); - AccumuloOutputFormat.setConnectorInfo(job, userStr, token); - AccumuloOutputFormat.setDefaultTableName(job, tableName); - AccumuloOutputFormat.setCreateTables(job, true); - //TODO best way to do this? - - if (zooStr.equals("mock")) { - AccumuloOutputFormat.setMockInstance(job, instStr); - } else { - AccumuloOutputFormat.setZooKeeperInstance(job, instStr, zooStr); - } - - job.setOutputFormatClass(AccumuloOutputFormat.class); - - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(Mutation.class); - } - - public static class MyMapper extends Mapper { - - private static final Logger logger = Logger.getLogger(MyMapper.class); - final static Text EMPTY_TEXT = new Text(); - final static Value EMPTY_VALUE = new Value(new byte[] {}); - private String[] varOrderPos = null; - private String[] projElem = null; - private Pattern splitPattern = null; - private List> varPositions = Lists.newArrayList(); - - - - @Override - protected void setup(Mapper.Context context) throws IOException, - InterruptedException { - - Configuration conf = context.getConfiguration(); - - varOrderPos = conf.getStrings("varOrderPos"); - splitPattern = Pattern.compile("\t"); - - for (String s : varOrderPos) { - String[] pos = s.split(";"); - List intPos = Lists.newArrayList(); - int i = 0; - for(String t: pos) { - i = Integer.parseInt(t); - intPos.add(i); - } - - varPositions.add(intPos); - - } - - projElem = conf.get("projElems").split(";"); - - super.setup(context); - } - - - - - - - @Override - public void map(LongWritable key, Text value, Context output) throws IOException, InterruptedException { - - String[] result = splitPattern.split(value.toString()); - - - for (List list : varPositions) { - - String values = ""; - String vars = ""; - - for (Integer i : list) { - - if (values.length() == 0) { - values = result[i]; - vars = projElem[i]; - } else { - values = values + "\u0000" + result[i]; - vars = vars + "\u0000" + projElem[i]; - } - - } - Mutation m = new Mutation(new Text(values)); - m.put(new Text(vars), EMPTY_TEXT, EMPTY_VALUE); - output.write(EMPTY_TEXT, m); - - } - output.getCounter(cardCounter, cardCounter).increment(1); - - } - } - -} diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java deleted file mode 100644 index ed8134d39..000000000 --- 
a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java +++ /dev/null @@ -1,268 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.base.Preconditions; -import com.google.common.io.ByteStreams; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRdfEvalStatsDAO; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.accumulo.pig.optimizer.SimilarVarJoinOptimizer; -import mvm.rya.rdftriplestore.evaluation.QueryJoinOptimizer; -import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.inference.InverseOfVisitor; -import mvm.rya.rdftriplestore.inference.SymmetricPropertyVisitor; -import mvm.rya.rdftriplestore.inference.TransitivePropertyVisitor; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.pig.ExecType; -import org.apache.pig.PigServer; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.QueryParser; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import java.io.ByteArrayInputStream; -import java.io.FileInputStream; -import java.io.IOException; - -/** - * Created by IntelliJ IDEA. - * Date: 4/23/12 - * Time: 9:31 AM - * To change this template use File | Settings | File Templates. 
- */ -public class SparqlQueryPigEngine { - private static final Log logger = LogFactory.getLog(SparqlQueryPigEngine.class); - - private String hadoopDir; - private ExecType execType = ExecType.MAPREDUCE; //default to mapreduce - private boolean inference = true; - private boolean stats = true; - private SparqlToPigTransformVisitor sparqlToPigTransformVisitor; - private PigServer pigServer; - private InferenceEngine inferenceEngine = null; - private RdfCloudTripleStoreEvaluationStatistics rdfCloudTripleStoreEvaluationStatistics; - private AccumuloRyaDAO ryaDAO; - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - private AccumuloRdfEvalStatsDAO rdfEvalStatsDAO; - - public AccumuloRdfConfiguration getConf() { - return conf; - } - - public void setConf(AccumuloRdfConfiguration conf) { - this.conf = conf; - } - - public void init() throws Exception { - Preconditions.checkNotNull(sparqlToPigTransformVisitor, "Sparql To Pig Transform Visitor must not be null"); - logger.info("Initializing Sparql Query Pig Engine"); - if (hadoopDir != null) { - //set hadoop dir property - System.setProperty("HADOOPDIR", hadoopDir); - } - //TODO: Maybe have validation of the HadoopDir system property - - if (pigServer == null) { - pigServer = new PigServer(execType); - } - - if (inference || stats) { - String instance = sparqlToPigTransformVisitor.getInstance(); - String zoo = sparqlToPigTransformVisitor.getZk(); - String user = sparqlToPigTransformVisitor.getUser(); - String pass = sparqlToPigTransformVisitor.getPassword(); - - Connector connector = new ZooKeeperInstance(instance, zoo).getConnector(user, pass.getBytes()); - - String tablePrefix = sparqlToPigTransformVisitor.getTablePrefix(); - conf.setTablePrefix(tablePrefix); - if (inference) { - logger.info("Using inference"); - inferenceEngine = new InferenceEngine(); - ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConf(conf); - ryaDAO.setConnector(connector); - ryaDAO.init(); - - inferenceEngine.setRyaDAO(ryaDAO); - inferenceEngine.setConf(conf); - inferenceEngine.setSchedule(false); - inferenceEngine.init(); - } - if (stats) { - logger.info("Using stats"); - rdfEvalStatsDAO = new AccumuloRdfEvalStatsDAO(); - rdfEvalStatsDAO.setConf(conf); - rdfEvalStatsDAO.setConnector(connector); -// rdfEvalStatsDAO.setEvalTable(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - rdfEvalStatsDAO.init(); - rdfCloudTripleStoreEvaluationStatistics = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO); - } - } - } - - public void destroy() throws Exception { - logger.info("Shutting down Sparql Query Pig Engine"); - pigServer.shutdown(); - if (ryaDAO != null) { - ryaDAO.destroy(); - } - if (inferenceEngine != null) { - inferenceEngine.destroy(); - } - if (rdfEvalStatsDAO != null) { - rdfEvalStatsDAO.destroy(); - } - } - - /** - * Transform a sparql query into a pig script and execute it. 
Save results in hdfsSaveLocation - * - * @param sparql to execute - * @param hdfsSaveLocation to save the execution - * @throws java.io.IOException - */ - public void runQuery(String sparql, String hdfsSaveLocation) throws IOException { - Preconditions.checkNotNull(sparql, "Sparql query cannot be null"); - Preconditions.checkNotNull(hdfsSaveLocation, "Hdfs save location cannot be null"); - logger.info("Running query[" + sparql + "]\n to Location[" + hdfsSaveLocation + "]"); - pigServer.deleteFile(hdfsSaveLocation); - try { - String pigScript = generatePigScript(sparql); - if (logger.isDebugEnabled()) { - logger.debug("Pig script [" + pigScript + "]"); - } - pigServer.registerScript(new ByteArrayInputStream(pigScript.getBytes())); - pigServer.store("PROJ", hdfsSaveLocation); //TODO: Make this a constant - } catch (Exception e) { - throw new IOException(e); - } - } - - public String generatePigScript(String sparql) throws Exception { - Preconditions.checkNotNull(sparql, "Sparql query cannot be null"); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(sparql, null); - QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr()); - -// SimilarVarJoinOptimizer similarVarJoinOptimizer = new SimilarVarJoinOptimizer(); -// similarVarJoinOptimizer.optimize(tupleExpr, null, null); - - if (inference || stats) { - if (inference) { - tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine)); - tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine)); - tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine)); - } - if (stats) { - (new QueryJoinOptimizer(rdfCloudTripleStoreEvaluationStatistics)).optimize(tupleExpr, null, null); - } - } - - sparqlToPigTransformVisitor.meet(tupleExpr); - return sparqlToPigTransformVisitor.getPigScript(); - } - - - public static void main(String[] args) { - try { - Preconditions.checkArgument(args.length == 7, "Usage: java -cp :$PIG_LIB sparqlFile hdfsSaveLocation cbinstance cbzk cbuser cbpassword rdfTablePrefix.\n " + - "Sample command: java -cp java -cp cloudbase.pig-2.0.0-SNAPSHOT-shaded.jar:/usr/local/hadoop-etc/hadoop-0.20.2/hadoop-0.20.2-core.jar:/srv_old/hdfs-tmp/pig/pig-0.9.2/pig-0.9.2.jar:$HADOOP_HOME/conf mvm.rya.accumulo.pig.SparqlQueryPigEngine " + - "tstSpqrl.query temp/engineTest stratus stratus13:2181 root password l_"); - String sparql = new String(ByteStreams.toByteArray(new FileInputStream(args[0]))); - String hdfsSaveLocation = args[1]; - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(args[6]); - visitor.setInstance(args[2]); - visitor.setZk(args[3]); - visitor.setUser(args[4]); - visitor.setPassword(args[5]); - - SparqlQueryPigEngine engine = new SparqlQueryPigEngine(); - engine.setSparqlToPigTransformVisitor(visitor); - engine.setInference(false); - engine.setStats(false); - - engine.init(); - - engine.runQuery(sparql, hdfsSaveLocation); - - engine.destroy(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public String getHadoopDir() { - return hadoopDir; - } - - public void setHadoopDir(String hadoopDir) { - this.hadoopDir = hadoopDir; - } - - public PigServer getPigServer() { - return pigServer; - } - - public void setPigServer(PigServer pigServer) { - this.pigServer = pigServer; - } - - public ExecType getExecType() { - return execType; - } - - public void setExecType(ExecType execType) { - this.execType = execType; - } - - public boolean isInference() { - return inference; - } - - public void setInference(boolean 
inference) { - this.inference = inference; - } - - public boolean isStats() { - return stats; - } - - public void setStats(boolean stats) { - this.stats = stats; - } - - public SparqlToPigTransformVisitor getSparqlToPigTransformVisitor() { - return sparqlToPigTransformVisitor; - } - - public void setSparqlToPigTransformVisitor(SparqlToPigTransformVisitor sparqlToPigTransformVisitor) { - this.sparqlToPigTransformVisitor = sparqlToPigTransformVisitor; - } -} diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java deleted file mode 100644 index 38d8adb62..000000000 --- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java +++ /dev/null @@ -1,345 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.query.algebra.*; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import java.util.*; - -/** - * Created by IntelliJ IDEA. - * Date: 4/12/12 - * Time: 10:17 AM - * To change this template use File | Settings | File Templates. 
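 * Typical wiring, mirroring the unit tests (a sketch; the connection values
 * are placeholders for a real Accumulo instance):
 * <pre>
 *   SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
 *   visitor.setTablePrefix("l_");
 *   visitor.setInstance("instance");
 *   visitor.setZk("zoo");
 *   visitor.setUser("root");
 *   visitor.setPassword("root");
 *   visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
 *   String pigScript = visitor.getPigScript();
 * </pre>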
- */ -public class SparqlToPigTransformVisitor extends QueryModelVisitorBase { - private StringBuilder pigScriptBuilder = new StringBuilder(); - private String tablePrefix; - private String instance, zk, user, password; //TODO: use a Configuration object to get these - - private Map varToSet = new HashMap(); - private Map> exprToNames = new HashMap>(); - private Map exprToVar = new HashMap(); - - private char i = 'A'; //TODO: do better, hack - - public SparqlToPigTransformVisitor() { - pigScriptBuilder.append("set pig.splitCombination false;\n") - .append("set default_parallel 32;\n") //TODO: set parallel properly - .append("set mapred.map.tasks.speculative.execution false;\n") - .append("set mapred.reduce.tasks.speculative.execution false;\n") - .append("set io.sort.mb 256;\n") - .append("set mapred.child.java.opts -Xmx2048m;\n") - .append("set mapred.compress.map.output true;\n") - .append("set mapred.map.output.compression.codec org.apache.hadoop.io.compress.GzipCodec;\n") - .append("set io.file.buffer.size 65536;\n") - .append("set io.sort.factor 25;\n"); - } - - @Override - public void meet(StatementPattern node) throws RuntimeException { - super.meet(node); - String subjValue = getVarValue(node.getSubjectVar()); - String predValue = getVarValue(node.getPredicateVar()); - String objValue = getVarValue(node.getObjectVar()); - - String subj = i + "_s"; - String pred = i + "_p"; - String obj = i + "_o"; - String var = i + ""; - if (node.getSubjectVar().getValue() == null) { //TODO: look nicer - subj = node.getSubjectVar().getName(); - varToSet.put(subj, var); - - addToExprToNames(node, subj); - } - if (node.getPredicateVar().getValue() == null) { //TODO: look nicer - pred = node.getPredicateVar().getName(); - varToSet.put(pred, var); - - addToExprToNames(node, pred); - } - if (node.getObjectVar().getValue() == null) { //TODO: look nicer - obj = node.getObjectVar().getName(); - varToSet.put(obj, var); - - addToExprToNames(node, obj); - } - if (node.getContextVar() != null && node.getContextVar().getValue() == null) { - String cntxtName = node.getContextVar().getName(); - varToSet.put(cntxtName, var); - - addToExprToNames(node, cntxtName); - } - //load 'l_' using mvm.rya.cloudbase.pig.dep.StatementPatternStorage('', '', '', - // 'stratus', 'stratus13:2181', 'root', 'password') AS (dept:chararray, p:chararray, univ:chararray); -// pigScriptBuilder.append(i).append(" = load '").append(tablePrefix).append("' using mvm.rya.cloudbase.pig.dep.StatementPatternStorage('") -// .append(subjValue).append("','").append(predValue).append("','").append(objValue).append("','").append(instance).append("','") -// .append(zk).append("','").append(user).append("','").append(password).append("') AS (").append(subj).append(":chararray, ") -// .append(pred).append(":chararray, ").append(obj).append(":chararray);\n"); - - //load 'cloudbase://tablePrefix?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&subject=a&predicate=b&object=c' - //using mvm.rya.accumulo.pig.StatementPatternStorage() AS (dept:chararray, p:chararray, univ:chararray); - pigScriptBuilder.append(i).append(" = load 'accumulo://").append(tablePrefix).append("?instance=").append(instance).append("&user=").append(user) - .append("&password=").append(password).append("&zookeepers=").append(zk); - if (subjValue != null && subjValue.length() > 0) { - pigScriptBuilder.append("&subject=").append(subjValue); - } - if (predValue != null && predValue.length() > 0) { - 
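                // A predicate constant becomes a filter on the load URI, like the subject
                // and object handled around it. A fully rendered load statement for
                // pattern A looks like, e.g. (values illustrative):
                //   A = load 'accumulo://l_?instance=instance&user=root&password=root&zookeepers=zoo&subject=...'
                //       using mvm.rya.accumulo.pig.StatementPatternStorage() AS (A_s:chararray, p:chararray, o:chararray);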
pigScriptBuilder.append("&predicate=").append(predValue); - } - if (objValue != null && objValue.length() > 0) { - pigScriptBuilder.append("&object=").append(objValue); - } - if (node.getContextVar() != null && node.getContextVar().getValue() != null) { - pigScriptBuilder.append("&context=").append(getVarValue(node.getContextVar())); - } - - pigScriptBuilder.append("' using ").append(StatementPatternStorage.class.getName()).append("() AS (").append(subj).append(":chararray, ") - .append(pred).append(":chararray, ").append(obj).append(":chararray"); - if (node.getContextVar() != null) { - Value cntxtValue = node.getContextVar().getValue(); - String cntxtName = null; - if (cntxtValue == null) { - //use name - cntxtName = node.getContextVar().getName(); - } else { - cntxtName = i + "_c"; - } - pigScriptBuilder.append(", ").append(cntxtName).append(":chararray"); - } - pigScriptBuilder.append(");\n"); - //TODO: add auths - - exprToVar.put(node, var); - i++; - } - - private void addToExprToNames(TupleExpr node, String name) { - List names = exprToNames.get(node); - if (names == null) { - names = new ArrayList(); - exprToNames.put(node, names); - } - names.add(name); - } - - @Override - public void meet(Union node) throws RuntimeException { - super.meet(node); - - TupleExpr leftArg = node.getLeftArg(); - TupleExpr rightArg = node.getRightArg(); - String left_var = exprToVar.get(leftArg); - String right_var = exprToVar.get(rightArg); - //Q = UNION ONSCHEMA B, P; - pigScriptBuilder.append(i).append(" = UNION ONSCHEMA ").append(left_var).append(", ").append(right_var).append(";\n"); - - String unionVar = i + ""; - List left_names = exprToNames.get(leftArg); - List right_names = exprToNames.get(rightArg); - for (String name : left_names) { - varToSet.put(name, unionVar); - addToExprToNames(node, name); - } - for (String name : right_names) { - varToSet.put(name, unionVar); - addToExprToNames(node, name); - } - exprToVar.put(node, unionVar); - i++; - } - - @Override - public void meet(Join node) throws RuntimeException { - super.meet(node); - - TupleExpr leftArg = node.getLeftArg(); - TupleExpr rightArg = node.getRightArg(); - List left_names = exprToNames.get(leftArg); - List right_names = exprToNames.get(rightArg); - - Set joinNames = new HashSet(left_names); - joinNames.retainAll(right_names); //intersection, this is what I join on - //SEC = join FIR by (MEMB_OF::ugrad, SUBORG_J::univ), UGRADDEG by (ugrad, univ); - StringBuilder joinStr = new StringBuilder(); - joinStr.append("("); - boolean first = true; - for (String name : joinNames) { //TODO: Make this a utility method - if (!first) { - joinStr.append(","); - } - first = false; - joinStr.append(name); - } - joinStr.append(")"); - - String left_var = exprToVar.get(leftArg); - String right_var = exprToVar.get(rightArg); - if (joinStr.length() <= 2) { - //no join params, need to cross - pigScriptBuilder.append(i).append(" = cross ").append(left_var).append(", ").append(right_var).append(";\n"); - } else { - //join - pigScriptBuilder.append(i).append(" = join ").append(left_var); - pigScriptBuilder.append(" by ").append(joinStr); - pigScriptBuilder.append(", ").append(right_var); - pigScriptBuilder.append(" by ").append(joinStr); - pigScriptBuilder.append(";\n"); - - } - - String joinVarStr = i + ""; - i++; - // D = foreach C GENERATE A::subj AS subj:chararray, A::A_p AS p:chararray; - String forEachVarStr = i + ""; - pigScriptBuilder.append(i).append(" = foreach ").append(joinVarStr).append(" GENERATE "); - Map nameToJoinName = new HashMap(); - 
for (String name : left_names) { - varToSet.put(name, forEachVarStr); - addToExprToNames(node, name); - nameToJoinName.put(name, left_var + "::" + name); - } - for (String name : right_names) { - varToSet.put(name, forEachVarStr); - addToExprToNames(node, name); - nameToJoinName.put(name, right_var + "::" + name); - } - - first = true; - for (Map.Entry entry : nameToJoinName.entrySet()) { - if (!first) { - pigScriptBuilder.append(","); - } - first = false; - pigScriptBuilder.append(entry.getValue()).append(" AS ").append(entry.getKey()).append(":chararray "); - } - pigScriptBuilder.append(";\n"); - - exprToVar.put(node, forEachVarStr); - i++; - } - - @Override - public void meet(Projection node) throws RuntimeException { - super.meet(node); - ProjectionElemList list = node.getProjectionElemList(); - String set = null; - StringBuilder projList = new StringBuilder(); - boolean first = true; - //TODO: we do not support projections from multiple pig statements yet - for (String name : list.getTargetNames()) { - set = varToSet.get(name); //TODO: overwrite - if (set == null) { - throw new IllegalArgumentException("Have not found any pig logic for name[" + name + "]"); - } - if (!first) { - projList.append(","); - } - first = false; - projList.append(name); - } - if (set == null) - throw new IllegalArgumentException(""); //TODO: Fill this - //SUBORG = FOREACH SUBORG_L GENERATE dept, univ; - pigScriptBuilder.append("PROJ = FOREACH ").append(set).append(" GENERATE ").append(projList.toString()).append(";\n"); - } - - @Override - public void meet(Slice node) throws RuntimeException { - super.meet(node); - long limit = node.getLimit(); - //PROJ = LIMIT PROJ 10; - pigScriptBuilder.append("PROJ = LIMIT PROJ ").append(limit).append(";\n"); - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getZk() { - return zk; - } - - public void setZk(String zk) { - this.zk = zk; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - public String getTablePrefix() { - return tablePrefix; - } - - public void setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - } - - public String getPigScript() { - return pigScriptBuilder.toString(); - } - - protected String getVarValue(Var var) { - if (var == null) { - return ""; - } else { - Value value = var.getValue(); - if (value == null) { - return ""; - } - if (value instanceof URI) { - return "<" + value.stringValue() + ">"; - } - if (value instanceof Literal) { - Literal lit = (Literal) value; - if (lit.getDatatype() == null) { - //string - return "\\'" + value.stringValue() + "\\'"; - } - } - return value.stringValue(); - } - - } -} diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java deleted file mode 100644 index 9ec9d4524..000000000 --- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java +++ /dev/null @@ -1,304 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.Collection; -import java.util.Map; -import java.util.Set; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.query.strategy.ByteRange; -import mvm.rya.api.query.strategy.TriplePatternStrategy; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.inference.InferenceEngineException; - -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.pig.data.Tuple; -import org.apache.pig.data.TupleFactory; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.QueryParser; -import org.openrdf.query.parser.sparql.SPARQLParser; - -import com.google.common.io.ByteArrayDataInput; -import com.google.common.io.ByteStreams; - -/** - */ -public class StatementPatternStorage extends AccumuloStorage { - private static final Log logger = LogFactory.getLog(StatementPatternStorage.class); - protected TABLE_LAYOUT layout; - protected String subject = "?s"; - protected String predicate = "?p"; - protected String object = "?o"; - protected String context; - private Value subject_value; - private Value predicate_value; - private Value object_value; - - private RyaTripleContext ryaContext; - - /** - * whether to turn inferencing on or off - */ - private boolean infer = false; - - public StatementPatternStorage() { - if (super.conf != null){ - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(super.conf)); - } - else { - ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration()); - } - - } - - private Value getValue(Var subjectVar) { - return subjectVar.hasValue() ? 
subjectVar.getValue() : null; - } - - @Override - public void setLocation(String location, Job job) throws IOException { - super.setLocation(location, job); - } - - @Override - protected void setLocationFromUri(String uri, Job job) throws IOException { - super.setLocationFromUri(uri, job); - // ex: accumulo://tablePrefix?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&subject=a&predicate=b&object=c&context=c&infer=true - addStatementPatternRange(subject, predicate, object, context); - if (infer) { - addInferredRanges(table, job); - } - - if (layout == null || ranges.size() == 0) - throw new IllegalArgumentException("Range and/or layout is null. Check the query"); - table = RdfCloudTripleStoreUtils.layoutPrefixToTable(layout, table); - tableName = new Text(table); - } - - @Override - protected void addLocationFromUriPart(String[] pair) { - if (pair[0].equals("subject")) { - this.subject = pair[1]; - } else if (pair[0].equals("predicate")) { - this.predicate = pair[1]; - } else if (pair[0].equals("object")) { - this.object = pair[1]; - } else if (pair[0].equals("context")) { - this.context = pair[1]; - } else if (pair[0].equals("infer")) { - this.infer = Boolean.parseBoolean(pair[1]); - } - } - - protected void addStatementPatternRange(String subj, String pred, String obj, String ctxt) throws IOException { - logger.info("Adding statement pattern[subject:" + subj + ", predicate:" + pred + ", object:" + obj + ", context:" + ctxt + "]"); - StringBuilder sparqlBuilder = new StringBuilder(); - sparqlBuilder.append("select * where {\n"); - if (ctxt != null) { - /** - * select * where { - GRAPH ?g { - ?p ?o. - } - } - */ - sparqlBuilder.append("GRAPH ").append(ctxt).append(" {\n"); - } - sparqlBuilder.append(subj).append(" ").append(pred).append(" ").append(obj).append(".\n"); - if (ctxt != null) { - sparqlBuilder.append("}\n"); - } - sparqlBuilder.append("}\n"); - String sparql = sparqlBuilder.toString(); - - if (logger.isDebugEnabled()) { - logger.debug("Sparql statement range[" + sparql + "]"); - } - - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = null; - try { - parsedQuery = parser.parseQuery(sparql, null); - } catch (MalformedQueryException e) { - throw new IOException(e); - } - parsedQuery.getTupleExpr().visitChildren(new QueryModelVisitorBase() { - @Override - public void meet(StatementPattern node) throws IOException { - Var subjectVar = node.getSubjectVar(); - Var predicateVar = node.getPredicateVar(); - Var objectVar = node.getObjectVar(); - subject_value = getValue(subjectVar); - predicate_value = getValue(predicateVar); - object_value = getValue(objectVar); - Var contextVar = node.getContextVar(); - Map.Entry temp = createRange(subject_value, predicate_value, object_value); - layout = temp.getKey(); - Range range = temp.getValue(); - addRange(range); - if (contextVar != null && contextVar.getValue() != null) { - String context_str = contextVar.getValue().stringValue(); - addColumnPair(context_str, ""); - } - } - }); - } - - protected Map.Entry createRange(Value s_v, Value p_v, Value o_v) throws IOException { - RyaURI subject_rya = RdfToRyaConversions.convertResource((Resource) s_v); - RyaURI predicate_rya = RdfToRyaConversions.convertURI((URI) p_v); - RyaType object_rya = RdfToRyaConversions.convertValue(o_v); - TriplePatternStrategy strategy = ryaContext.retrieveStrategy(subject_rya, predicate_rya, object_rya, null); - if (strategy == null) - return new RdfCloudTripleStoreUtils.CustomEntry(TABLE_LAYOUT.SPO, new 
Range()); - Map.Entry entry = strategy.defineRange(subject_rya, predicate_rya, object_rya, null, null); - ByteRange byteRange = entry.getValue(); - return new RdfCloudTripleStoreUtils.CustomEntry( - entry.getKey(), new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd())) - ); - } - - protected void addInferredRanges(String tablePrefix, Job job) throws IOException { - logger.info("Adding inferences to statement pattern[subject:" + subject_value + ", predicate:" + predicate_value + ", object:" + object_value + "]"); - //inference engine - AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO(); - InferenceEngine inferenceEngine = new InferenceEngine(); - try { - AccumuloRdfConfiguration rdfConf = new AccumuloRdfConfiguration(job.getConfiguration()); - rdfConf.setTablePrefix(tablePrefix); - ryaDAO.setConf(rdfConf); - try { - if (!mock) { - ryaDAO.setConnector(new ZooKeeperInstance(inst, zookeepers).getConnector(user, password.getBytes())); - } else { - ryaDAO.setConnector(new MockInstance(inst).getConnector(user, password.getBytes())); - } - } catch (Exception e) { - throw new IOException(e); - } - ryaDAO.init(); - inferenceEngine.setConf(rdfConf); - inferenceEngine.setRyaDAO(ryaDAO); - inferenceEngine.setSchedule(false); - inferenceEngine.init(); - //is it subclassof or subpropertyof - if (RDF.TYPE.equals(predicate_value)) { - //try subclassof - Collection parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), (URI) object_value); - if (parents != null && parents.size() > 0) { - //subclassof relationships found - //don't add self, that will happen anyway later - //add all relationships - for (URI parent : parents) { - Map.Entry temp = - createRange(subject_value, predicate_value, parent); - Range range = temp.getValue(); - if (logger.isDebugEnabled()) { - logger.debug("Found subClassOf relationship [type:" + object_value + " is subClassOf:" + parent + "]"); - } - addRange(range); - } - } - } else if (predicate_value != null) { - //subpropertyof check - Set parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), (URI) predicate_value); - for (URI parent : parents) { - Map.Entry temp = - createRange(subject_value, parent, object_value); - Range range = temp.getValue(); - if (logger.isDebugEnabled()) { - logger.debug("Found subPropertyOf relationship [type:" + predicate_value + " is subPropertyOf:" + parent + "]"); - } - addRange(range); - } - } - } catch (Exception e) { - logger.error("Exception in adding inferred ranges", e); - throw new IOException(e); - } finally { - if (inferenceEngine != null) { - try { - inferenceEngine.destroy(); - } catch (InferenceEngineException e) { - logger.error("Exception closing InferenceEngine", e); - } - } - if (ryaDAO != null) { - try { - ryaDAO.destroy(); - } catch (RyaDAOException e) { - logger.error("Exception closing ryadao", e); - } - } - } - } - - @Override - public Tuple getNext() throws IOException { - try { - if (reader.nextKeyValue()) { - Key key = (Key) reader.getCurrentKey(); - org.apache.accumulo.core.data.Value value = (org.apache.accumulo.core.data.Value) reader.getCurrentValue(); - ByteArrayDataInput input = ByteStreams.newDataInput(key.getRow().getBytes()); - RyaStatement ryaStatement = ryaContext.deserializeTriple(layout, new TripleRow(key.getRow().getBytes(), - key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes())); - - Tuple tuple = TupleFactory.getInstance().newTuple(4); - tuple.set(0, ryaStatement.getSubject().getData()); - tuple.set(1, 
ryaStatement.getPredicate().getData()); - tuple.set(2, ryaStatement.getObject().getData()); - tuple.set(3, (ryaStatement.getContext() != null) ? (ryaStatement.getContext().getData()) : (null)); - return tuple; - } - } catch (Exception e) { - throw new IOException(e); - } - return null; - } -} diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java deleted file mode 100644 index 4b458b6d4..000000000 --- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java +++ /dev/null @@ -1,210 +0,0 @@ -package mvm.rya.accumulo.pig.optimizer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.*; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; - -import java.util.*; - -/** - * A query optimizer that re-orders nested Joins according to cardinality, preferring joins that have similar variables. - * - */ -public class SimilarVarJoinOptimizer implements QueryOptimizer { - - protected final EvaluationStatistics statistics; - - public SimilarVarJoinOptimizer() { - this(new EvaluationStatistics()); - } - - public SimilarVarJoinOptimizer(EvaluationStatistics statistics) { - this.statistics = statistics; - } - - /** - * Applies generally applicable optimizations: path expressions are sorted - * from more to less specific. 
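 * For example, given patterns P1 (estimated cardinality 1000), P2 (10) and
 * P3 (100) where only P2 and P3 share a variable, the JoinVisitor below
 * orders them P2, P3, P1: the cheapest expression is chosen first, then the
 * cheapest among those sharing a variable with the previous choice, falling
 * back to all remaining expressions when none overlap.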
- * - * @param tupleExpr - */ - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - tupleExpr.visit(new JoinVisitor()); - } - - protected class JoinVisitor extends QueryModelVisitorBase { - - Set boundVars = new HashSet(); - - @Override - public void meet(LeftJoin leftJoin) { - leftJoin.getLeftArg().visit(this); - - Set origBoundVars = boundVars; - try { - boundVars = new HashSet(boundVars); - boundVars.addAll(leftJoin.getLeftArg().getBindingNames()); - - leftJoin.getRightArg().visit(this); - } finally { - boundVars = origBoundVars; - } - } - - @Override - public void meet(Join node) { - Set origBoundVars = boundVars; - try { - boundVars = new HashSet(boundVars); - - // Recursively get the join arguments - List joinArgs = getJoinArgs(node, new ArrayList()); - - // Build maps of cardinalities and vars per tuple expression - Map cardinalityMap = new HashMap(); - - for (TupleExpr tupleExpr : joinArgs) { - double cardinality = statistics.getCardinality(tupleExpr); - cardinalityMap.put(tupleExpr, cardinality); - } - - // Reorder the (recursive) join arguments to a more optimal sequence - List orderedJoinArgs = new ArrayList(joinArgs.size()); - TupleExpr last = null; - while (!joinArgs.isEmpty()) { - TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap, last); - if (tupleExpr == null) { - break; - } - - joinArgs.remove(tupleExpr); - orderedJoinArgs.add(tupleExpr); - last = tupleExpr; - - // Recursively optimize join arguments - tupleExpr.visit(this); - - boundVars.addAll(tupleExpr.getBindingNames()); - } - - // Build new join hierarchy - // Note: generated hierarchy is right-recursive to help the - // IterativeEvaluationOptimizer to factor out the left-most join - // argument - int i = 0; - TupleExpr replacement = orderedJoinArgs.get(i); - for (i++; i < orderedJoinArgs.size(); i++) { - replacement = new Join(replacement, orderedJoinArgs.get(i)); - } - - // Replace old join hierarchy - node.replaceWith(replacement); - } finally { - boundVars = origBoundVars; - } - } - - protected > L getJoinArgs(TupleExpr tupleExpr, L joinArgs) { - if (tupleExpr instanceof Join) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs); - getJoinArgs(join.getRightArg(), joinArgs); - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - - protected List getStatementPatternVars(TupleExpr tupleExpr) { - if(tupleExpr == null) - return null; - List stPatterns = StatementPatternCollector.process(tupleExpr); - List varList = new ArrayList(stPatterns.size() * 4); - for (StatementPattern sp : stPatterns) { - sp.getVars(varList); - } - return varList; - } - - protected > M getVarFreqMap(List varList, M varFreqMap) { - for (Var var : varList) { - Integer freq = varFreqMap.get(var); - freq = (freq == null) ? 1 : freq + 1; - varFreqMap.put(var, freq); - } - return varFreqMap; - } - - /** - * Selects from a list of tuple expressions the next tuple expression that - * should be evaluated. This method selects the tuple expression with - * highest number of bound variables, preferring variables that have been - * bound in other tuple expressions over variables with a fixed value. 
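 * In this implementation, "next" concretely means: restrict the candidates to
 * expressions that share a statement-pattern variable with the previously
 * selected expression (when any exist), then take the one with the lowest
 * estimated cardinality.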
- */ - protected TupleExpr selectNextTupleExpr(List expressions, - Map cardinalityMap, - TupleExpr last) { - double lowestCardinality = Double.MAX_VALUE; - TupleExpr result = expressions.get(0); - expressions = getExprsWithSameVars(expressions, last); - - for (TupleExpr tupleExpr : expressions) { - // Calculate a score for this tuple expression - double cardinality = cardinalityMap.get(tupleExpr); - - if (cardinality < lowestCardinality) { - // More specific path expression found - lowestCardinality = cardinality; - result = tupleExpr; - } - } - - return result; - } - - protected List getExprsWithSameVars(List expressions, TupleExpr last) { - if(last == null) - return expressions; - List retExprs = new ArrayList(); - for(TupleExpr tupleExpr : expressions) { - List statementPatternVars = getStatementPatternVars(tupleExpr); - List lastVars = getStatementPatternVars(last); - statementPatternVars.retainAll(lastVars); - if(statementPatternVars.size() > 0) { - retExprs.add(tupleExpr); - } - } - if(retExprs.size() == 0) { - return expressions; - } - return retExprs; - } - - } -} diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java deleted file mode 100644 index 119ccb174..000000000 --- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java +++ /dev/null @@ -1,284 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import junit.framework.TestCase; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.ColumnVisibility; -import org.apache.accumulo.core.security.TablePermission; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.InputFormat; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.RecordReader; -import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import org.apache.pig.data.Tuple; - -/** - * Created by IntelliJ IDEA. 
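 * Exercises AccumuloStorage against a MockInstance purely through its location
 * URI; the query-string options covered below are range=a|c (and repeated
 * range= pairs), columns=cf1,cf3|cq1, auths=A and mock=true.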
- * Date: 4/20/12 - * Time: 10:17 AM - * To change this template use File | Settings | File Templates. - */ -public class AccumuloStorageTest extends TestCase { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String table = "testTable"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - - @Override - public void setUp() throws Exception { - super.setUp(); - connector = new MockInstance(instance).getConnector(user, new PasswordToken(pwd.getBytes())); - connector.tableOperations().create(table); - SecurityOperations secOps = connector.securityOperations(); - secOps.createLocalUser(user, new PasswordToken(pwd.getBytes())); - secOps.grantTablePermission(user, table, TablePermission.READ); - secOps.grantTablePermission(user, table, TablePermission.WRITE); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(table); - } - - public void testSimpleOutput() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("row"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|z&mock=true"; - AccumuloStorage storage = createAccumuloStorage(location); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(1, count); - } - - public void testRange() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("a"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - row = new Mutation("b"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - row = new Mutation("d"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&mock=true"; - AccumuloStorage storage = createAccumuloStorage(location); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(2, count); - } - - public void testMultipleRanges() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("a"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - row = new Mutation("b"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - row = new Mutation("d"); - row.put("cf", "cq", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&range=d|e&mock=true"; - List storages = createAccumuloStorages(location); - assertEquals(2, storages.size()); - AccumuloStorage storage = storages.get(0); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(2, count); - storage = storages.get(1); - count = 0; - while (true) { - Tuple next = 
storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(1, count); - } - - public void testColumns() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("a"); - row.put("cf1", "cq", new Value(new byte[0])); - row.put("cf2", "cq", new Value(new byte[0])); - row.put("cf3", "cq1", new Value(new byte[0])); - row.put("cf3", "cq2", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&columns=cf1,cf3|cq1&mock=true"; - AccumuloStorage storage = createAccumuloStorage(location); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(2, count); - } - - public void testWholeRowRange() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("a"); - row.put("cf1", "cq", new Value(new byte[0])); - row.put("cf2", "cq", new Value(new byte[0])); - row.put("cf3", "cq1", new Value(new byte[0])); - row.put("cf3", "cq2", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a&mock=true"; - AccumuloStorage storage = createAccumuloStorage(location); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(4, count); - } - - public void testAuths() throws Exception { - BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2); - Mutation row = new Mutation("a"); - row.put("cf1", "cq1", new ColumnVisibility("A"), new Value(new byte[0])); - row.put("cf2", "cq2", new Value(new byte[0])); - batchWriter.addMutation(row); - batchWriter.flush(); - batchWriter.close(); - - String location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&mock=true"; - AccumuloStorage storage = createAccumuloStorage(location); - int count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(1, count); - - location = "accumulo://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&auths=A&mock=true"; - storage = createAccumuloStorage(location); - count = 0; - while (true) { - Tuple next = storage.getNext(); - if (next == null) - break; - assertEquals(6, next.size()); - count++; - } - assertEquals(2, count); - } - - protected AccumuloStorage createAccumuloStorage(String location) throws IOException, InterruptedException { - List accumuloStorages = createAccumuloStorages(location); - if (accumuloStorages.size() > 0) { - return accumuloStorages.get(0); - } - return null; - } - - protected List createAccumuloStorages(String location) throws IOException, InterruptedException { - List accumuloStorages = new ArrayList(); - AccumuloStorage storage = new AccumuloStorage(); - InputFormat inputFormat = storage.getInputFormat(); - Job job = new Job(new Configuration()); - storage.setLocation(location, job); - List splits = inputFormat.getSplits(job); - assertNotNull(splits); - - for (InputSplit inputSplit : splits) { - storage 
= new AccumuloStorage(); - job = new Job(new Configuration()); - storage.setLocation(location, job); - TaskAttemptContext taskAttemptContext = new TaskAttemptContextImpl(job.getConfiguration(), - new TaskAttemptID("jtid", 0, false, 0, 0)); - RecordReader recordReader = inputFormat.createRecordReader(inputSplit, - taskAttemptContext); - recordReader.initialize(inputSplit, taskAttemptContext); - - storage.prepareToRead(recordReader, null); - accumuloStorages.add(storage); - } - return accumuloStorages; - } -} diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java deleted file mode 100644 index 02a6f84c5..000000000 --- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java +++ /dev/null @@ -1,326 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import java.io.File; -import java.io.IOException; -import java.util.Map; -import junit.framework.Assert; -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.util.ToolRunner; -import org.junit.Test; - -public class IndexWritingToolTest { - - - - @Test - public void testIndexWrite() { - - - - Connector accCon = null; - Instance inst; - - String[] args = new String[7]; - - args[0] = "src/test/resources/ResultsFile1.txt"; - args[1] = "src/test/resources/testQuery.txt"; - args[2] = "instance"; - args[3] = "mock"; - args[4] = "user"; - args[5] = "password"; - args[6] = "table"; - - String query = null; - try { - query = FileUtils.readFileToString(new File(args[1])); - } catch (IOException e1) { - - e1.printStackTrace(); - } - - - try { - inst = new MockInstance(args[2]); - accCon = inst.getConnector(args[4], args[5].getBytes()); - if(accCon.tableOperations().exists(args[6])) { - accCon.tableOperations().delete(args[6]); - } - - accCon.tableOperations().create(args[6]); - - } catch (AccumuloException e) { - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - e.printStackTrace(); - } catch (TableExistsException e) { - e.printStackTrace(); - } catch 
(TableNotFoundException e) { - e.printStackTrace(); - } - - - int result = 5; - try { - result = ToolRunner.run(new IndexWritingTool(), args); - } catch (Exception e) { - e.printStackTrace(); - } - - Assert.assertEquals(0, result); - - Scanner scan = null; - - try { - scan = accCon.createScanner("table", new Authorizations()); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - scan.setRange(new Range()); - - int count = 0; - - for (Map.Entry entry : scan) { - String[] k = entry.getKey().getRow().toString().split("\u0000"); - String[] c = entry.getKey().getColumnFamily().toString().split("\u0000"); - - if(count == 0) { - Assert.assertEquals(k[0], "person10"); - Assert.assertEquals(k[1], "person8"); - Assert.assertEquals(k[2], "person9"); - Assert.assertEquals(c[0],"z"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"y"); - } - else if(count == 2) { - Assert.assertEquals(k[0], "person2"); - Assert.assertEquals(k[1], "person1"); - Assert.assertEquals(k[2], "person3"); - Assert.assertEquals(c[0],"y"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"z"); - } - else if(count == 5) { - Assert.assertEquals(k[0], "person3"); - Assert.assertEquals(k[1], "person2"); - Assert.assertEquals(k[2], "person4"); - Assert.assertEquals(c[0],"y"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"z"); - } - else if(count == 9) { - Assert.assertEquals(k[0], "person5"); - Assert.assertEquals(k[1], "person3"); - Assert.assertEquals(k[2], "person4"); - Assert.assertEquals(c[0],"z"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"y"); - } - else if(count == 13) { - Assert.assertEquals(k[0], "person6"); - Assert.assertEquals(k[1], "person5"); - Assert.assertEquals(k[2], "person4"); - Assert.assertEquals(c[0],"z"); - Assert.assertEquals(c[1],"y"); - Assert.assertEquals(c[2],"x"); - } - else if(count == 17) { - Assert.assertEquals(k[0], "person7"); - Assert.assertEquals(k[1], "person6"); - Assert.assertEquals(k[2], "person8"); - Assert.assertEquals(c[0],"y"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"z"); - } - else if(count == 21) { - Assert.assertEquals(k[0], "person9"); - Assert.assertEquals(k[1], "person7"); - Assert.assertEquals(k[2], "person8"); - Assert.assertEquals(c[0],"z"); - Assert.assertEquals(c[1],"x"); - Assert.assertEquals(c[2],"y"); - } else if(count == 24) { - Assert.assertEquals(query, entry.getValue().toString()); - String[] varOrders = entry.getKey().getColumnQualifier().toString().split("\u0000"); - Assert.assertEquals(3,varOrders.length); - Assert.assertEquals(varOrders[0],"z;y;x"); - Assert.assertEquals(varOrders[1],"y;x;z"); - Assert.assertEquals(varOrders[2],"z;x;y"); - - } else { - Assert.assertTrue(k[0].startsWith("person")); - Assert.assertTrue(k[1].startsWith("person")); - Assert.assertTrue(k[2].startsWith("person")); - - } - - count ++; - } - - Assert.assertEquals(25, count); - - - - - } - - - - - - - - @Test - public void testIndexWrite2() { - - - - Connector accCon = null; - Instance inst; - - String[] args = new String[7]; - - args[0] = "src/test/resources/ResultsFile1.txt"; - args[1] = "src/test/resources/testQuery2.txt"; - args[2] = "instance"; - args[3] = "mock"; - args[4] = "user"; - args[5] = "password"; - args[6] = "table"; - - String query = null; - try { - query = FileUtils.readFileToString(new File(args[1])); - } catch (IOException e1) { - - e1.printStackTrace(); - } - - - try { - inst = new MockInstance(args[2]); - accCon = inst.getConnector(args[4], args[5].getBytes()); - 
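            // Same fixture as testIndexWrite above: drop and recreate the mock table so
            // the run starts empty, then launch the tool through Hadoop's ToolRunner, e.g.
            //   int exit = ToolRunner.run(new IndexWritingTool(), args); // 0 on success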
if(accCon.tableOperations().exists(args[6])) { - accCon.tableOperations().delete(args[6]); - } - - accCon.tableOperations().create(args[6]); - - } catch (AccumuloException e) { - - e.printStackTrace(); - } catch (AccumuloSecurityException e) { - - e.printStackTrace(); - } catch (TableExistsException e) { - - e.printStackTrace(); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - - - int result = 5; - try { - result = ToolRunner.run(new IndexWritingTool(), args); - } catch (Exception e) { - - e.printStackTrace(); - } - - Assert.assertEquals(0, result); - - Scanner scan = null; - - try { - scan = accCon.createScanner("table", new Authorizations()); - } catch (TableNotFoundException e) { - - e.printStackTrace(); - } - scan.setRange(new Range()); - - int count = 0; - - for (Map.Entry entry : scan) { - String[] k = entry.getKey().getRow().toString().split("\u0000"); - String[] c = entry.getKey().getColumnFamily().toString().split("\u0000"); - - if(count == 0) { - Assert.assertEquals(k[0], "person1"); - Assert.assertEquals(k[1], "person2"); - Assert.assertEquals(k[2], "person3"); - Assert.assertEquals(c[0],"x"); - Assert.assertEquals(c[1],"y"); - Assert.assertEquals(c[2],"z"); - } - else if(count == 2) { - Assert.assertEquals(k[0], "person3"); - Assert.assertEquals(k[1], "person4"); - Assert.assertEquals(k[2], "person5"); - Assert.assertEquals(c[0],"x"); - Assert.assertEquals(c[1],"y"); - Assert.assertEquals(c[2],"z"); - } - else if(count == 5) { - Assert.assertEquals(k[0], "person6"); - Assert.assertEquals(k[1], "person7"); - Assert.assertEquals(k[2], "person8"); - Assert.assertEquals(c[0],"x"); - Assert.assertEquals(c[1],"y"); - Assert.assertEquals(c[2],"z"); - } - - - count ++; - System.out.println(count); - } - - Assert.assertEquals(9, count); - - - - - } - - - - - - - - -} diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java deleted file mode 100644 index e4cf10e70..000000000 --- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java +++ /dev/null @@ -1,76 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import org.apache.pig.ExecType; - -/** - * Created by IntelliJ IDEA. - * Date: 4/23/12 - * Time: 10:14 AM - * To change this template use File | Settings | File Templates. 
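 * Runs the engine with ExecType.LOCAL so no Hadoop cluster is required, and
 * asserts on the script returned by generatePigScript(String) rather than
 * executing the query.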
- */ -public class SparqlQueryPigEngineTest extends TestCase { - - private SparqlQueryPigEngine engine; - - @Override - public void setUp() throws Exception { - super.setUp(); - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix("l_"); - visitor.setInstance("stratus"); - visitor.setZk("stratus13:2181"); - visitor.setUser("root"); - visitor.setPassword("password"); - - engine = new SparqlQueryPigEngine(); - engine.setSparqlToPigTransformVisitor(visitor); - engine.setExecType(ExecType.LOCAL); - engine.setInference(false); - engine.setStats(false); - engine.init(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - engine.destroy(); - } - - public void testStatementPattern() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - "PREFIX rdfs: \n" + - " SELECT * WHERE\n" + - " {\n" + - "\t ?p ?o\n" + - " }\n" + - ""; - -// engine.runQuery(query, "/temp/testSP"); - assertTrue(engine.generatePigScript(query).contains("A = load 'accumulo://l_?instance=stratus&user=root&password=password&zookeepers=stratus13:2181&subject=' using mvm.rya.accumulo.pig.StatementPatternStorage() AS (A_s:chararray, p:chararray, o:chararray);\n" + - "PROJ = FOREACH A GENERATE p,o;")); - - } -} diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java deleted file mode 100644 index b011a2480..000000000 --- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java +++ /dev/null @@ -1,402 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.accumulo.pig.optimizer.SimilarVarJoinOptimizer; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.QueryParser; -import org.openrdf.query.parser.sparql.SPARQLParser; - -/** - * Created by IntelliJ IDEA. - * Date: 4/12/12 - * Time: 10:18 AM - * To change this template use File | Settings | File Templates. 
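 * Each test follows the same shape (a sketch):
 * <pre>
 *   ParsedQuery parsedQuery = new SPARQLParser().parseQuery(query, null);
 *   visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
 *   String script = visitor.getPigScript();
 * </pre>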
- */ -public class SparqlToPigTransformVisitorTest extends TestCase { - - private String zk; - private String instance; - private String tablePrefix; - private String user; - private String password; - - protected void setUp() throws Exception { - super.setUp(); - zk = "zoo"; - instance = "instance"; - tablePrefix = "l_"; - user = "root"; - password = "root"; - } - - public void testStatementPattern() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - "PREFIX rdfs: \n" + - " SELECT * WHERE\n" + - " {\n" + - "\t?x rdf:type ub:UndergraduateStudent\n" + - " }\n" + - ""; - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testStatementPatternContext() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - "PREFIX rdfs: \n" + - " SELECT * WHERE\n" + - " {\n" + - " GRAPH ub:g1 {\n" + - "\t?x rdf:type ub:UndergraduateStudent\n" + - " }\n" + - " }\n" + - ""; - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testStatementPatternContextVar() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - "PREFIX rdfs: \n" + - " SELECT * WHERE\n" + - " {\n" + - " GRAPH ?g {\n" + - "\t?x rdf:type ub:UndergraduateStudent\n" + - " }\n" + - " ?x ub:pred ?g." 
+ - " }\n" + - ""; - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testJoin() throws Exception { - String query = "select * where {\n" + - "?subj 'Department0'.\n" + - "?subj .\n" + - "}"; -// System.out.println(query); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testMutliReturnJoin() throws Exception { - String query = "select * where {\n" + - "?subj 'Department0'.\n" + - "?subj ?suborg.\n" + - "}"; - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - - System.out.println(query); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); - System.out.println(visitor.getPigScript()); - } - - public void testMutlipleJoins() throws Exception { - String query = "select * where {\n" + - "?subj 'Department0'.\n" + - "?subj .\n" + - "?subj .\n" + - "}"; -// System.out.println(query); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testCross() throws Exception { - String query = "select * where {\n" + - "?subj0 'Department0'.\n" + - "?subj1 'Department1'.\n" + - "?subj0 .\n" + - "?subj1 .\n" + - "}"; -// System.out.println(query); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr()); - - SimilarVarJoinOptimizer similarVarJoinOptimizer = new SimilarVarJoinOptimizer(); - similarVarJoinOptimizer.optimize(tupleExpr, null, null); - -// System.out.println(tupleExpr); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(tupleExpr); -// System.out.println(visitor.getPigScript()); - } - - public void testLimit() throws Exception { - String query = "select * where {\n" + - "?subj 'Department0'.\n" + - "?subj ?suborg.\n" + - "} limit 100"; -// System.out.println(query); - QueryParser parser = new 
SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - -// System.out.println(parsedQuery); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); - } - - public void testHardQuery() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y rdf:type ub:University .\n" + - " ?z ub:subOrganizationOf ?y .\n" + - " ?z rdf:type ub:Department .\n" + - " ?x ub:memberOf ?z .\n" + - " ?x ub:undergraduateDegreeFrom ?y .\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " }\n" + - "limit 100"; -// String query = "PREFIX rdf: \n" + -// " PREFIX ub: \n" + -// " SELECT * WHERE\n" + -// " {\n" + -// "\t?x ub:advisor ?y.\n" + -// "\t?y ub:teacherOf ?z.\n" + -// "\t?x ub:takesCourse ?z.\n" + -// "\t?x rdf:type ub:Student.\n" + -// "\t?y rdf:type ub:Faculty.\n" + -// "\t?z rdf:type ub:Course.\n" + -// " }\n" + -// "limit 100"; -// System.out.println(query); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - - TupleExpr tupleExpr = parsedQuery.getTupleExpr(); - -// CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO(); -// rdfEvalStatsDAO.setConnector(new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes())); -// rdfEvalStatsDAO.setEvalTable("l_eval"); -// RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(new Configuration(), rdfEvalStatsDAO); -// (new SimilarVarJoinOptimizer(stats)).optimize(tupleExpr, null, null); - -// System.out.println(tupleExpr); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(tupleExpr)); - //System.out.println(visitor.getPigScript()); - } - - public void testFixedStatementPatternInferenceQuery() throws Exception { - String query = "PREFIX rdf: \n" + - " PREFIX rdfs: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y ub:memberOf .\n" + - " {?y rdf:type ub:Professor.}\n" + - " UNION \n" + - " {?y rdf:type ub:GraduateStudent.}\n" + - " }"; -// System.out.println(query); - QueryParser parser = new SPARQLParser(); - ParsedQuery parsedQuery = parser.parseQuery(query, null); - - TupleExpr tupleExpr = parsedQuery.getTupleExpr(); - -// Configuration conf = new Configuration(); -// Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()); -// -// InferenceEngine inferenceEngine = new InferenceEngine(); -// CloudbaseRdfDAO rdfDAO = new CloudbaseRdfDAO(); -// rdfDAO.setConf(conf); -// rdfDAO.setConnector(connector); -// rdfDAO.setNamespaceTable("l_ns"); -// rdfDAO.setSpoTable("l_spo"); -// rdfDAO.setPoTable("l_po"); -// rdfDAO.setOspTable("l_osp"); -// rdfDAO.init(); -// -// inferenceEngine.setRdfDao(rdfDAO); -// inferenceEngine.setConf(conf); -// inferenceEngine.init(); -// -// tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine)); -// tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine)); -// tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine)); -// 
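// --------------------------------------------------------------------
// The commented-out block here is legacy (Cloudbase-era) wiring; with the
// classes this patch removes elsewhere, the same inference pass distills
// to roughly the following sketch (ryaDAO and conf are assumed to be an
// initialized RyaDAO and its configuration):
InferenceEngine inferenceEngine = new InferenceEngine();
inferenceEngine.setRyaDAO(ryaDAO);
inferenceEngine.setConf(conf);
inferenceEngine.init();

// expand the query tree in place, one reasoning rule per visitor
tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine));
tupleExpr.visit(new SubPropertyOfVisitor(conf, inferenceEngine));
tupleExpr.visit(new SubClassOfVisitor(conf, inferenceEngine));
// --------------------------------------------------------------------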
tupleExpr.visit(new SubPropertyOfVisitor(conf, inferenceEngine)); -// tupleExpr.visit(new SubClassOfVisitor(conf, inferenceEngine)); -// -// CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO(); -// rdfEvalStatsDAO.setConnector(connector); -// rdfEvalStatsDAO.setEvalTable("l_eval"); -// RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO); -// (new QueryJoinOptimizer(stats)).optimize(tupleExpr, null, null); - -// System.out.println(tupleExpr); - - SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); - visitor.setTablePrefix(tablePrefix); - visitor.setInstance(instance); - visitor.setZk(zk); - visitor.setUser(user); - visitor.setPassword(password); - visitor.meet(new QueryRoot(tupleExpr)); -// System.out.println(visitor.getPigScript()); - } - -// public void testInverseOf() throws Exception { -// String query = "PREFIX rdf: \n" + -// " PREFIX ub: \n" + -// " SELECT * WHERE\n" + -// " {\n" + -// " ?x rdf:type ub:Person .\n" + -// " ub:hasAlumnus ?x .\n" + -// " } "; -// System.out.println(query); -// QueryParser parser = new SPARQLParser(); -// ParsedQuery parsedQuery = parser.parseQuery(query, null); -// TupleExpr tupleExpr = parsedQuery.getTupleExpr(); -// -// Configuration conf = new Configuration(); -// Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()); -// -// InferenceEngine inferenceEngine = new InferenceEngine(); -// CloudbaseRdfDAO rdfDAO = new CloudbaseRdfDAO(); -// rdfDAO.setConf(conf); -// rdfDAO.setConnector(connector); -// rdfDAO.setNamespaceTable("l_ns"); -// rdfDAO.setSpoTable("l_spo"); -// rdfDAO.setPoTable("l_po"); -// rdfDAO.setOspTable("l_osp"); -// rdfDAO.init(); -// -// inferenceEngine.setRdfDao(rdfDAO); -// inferenceEngine.setConf(conf); -// inferenceEngine.init(); -// -// tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine)); -// tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine)); -// tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine)); -// -// CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO(); -// rdfEvalStatsDAO.setConnector(connector); -// rdfEvalStatsDAO.setEvalTable("l_eval"); -// RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO); -// (new QueryJoinOptimizer(stats)).optimize(tupleExpr, null, null); -// -// -// System.out.println(tupleExpr); -// -// SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor(); -// visitor.setTablePrefix("l_"); -// visitor.setInstance("stratus"); -// visitor.setZk("stratus13:2181"); -// visitor.setUser("root"); -// visitor.setPassword("password"); -// visitor.meet(new QueryRoot(parsedQuery.getTupleExpr())); -// System.out.println(visitor.getPigScript()); -// } -} diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java deleted file mode 100644 index 5bc4a3499..000000000 --- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java +++ /dev/null @@ -1,185 +0,0 @@ -package mvm.rya.accumulo.pig; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.InputFormat; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.RecordReader; -import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import org.apache.pig.data.Tuple; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -/** - * Created by IntelliJ IDEA. - * Date: 4/20/12 - * Time: 5:14 PM - * To change this template use File | Settings | File Templates. 
- */ -public class StatementPatternStorageTest extends TestCase { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - private AccumuloRyaDAO ryaDAO; - private ValueFactory vf = new ValueFactoryImpl(); - private String namespace = "urn:test#"; - private AccumuloRdfConfiguration conf; - - @Override - public void setUp() throws Exception { - super.setUp(); - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - - conf = new AccumuloRdfConfiguration(); - ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - conf.setTablePrefix(tablePrefix); - ryaDAO.setConf(conf); - ryaDAO.init(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - } - - public void testSimplePredicateRange() throws Exception { - ryaDAO.add(new RyaStatement(new RyaURI(namespace, "a"),new RyaURI(namespace,"p"), new RyaType("l"))); - ryaDAO.add(new RyaStatement(new RyaURI(namespace, "b"), new RyaURI(namespace, "p"), new RyaType("l"))); - ryaDAO.add(new RyaStatement(new RyaURI(namespace, "c"), new RyaURI(namespace, "n"), new RyaType("l"))); - - - int count = 0; - List storages = createStorages("accumulo://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&mock=true"); - for (StatementPatternStorage storage : storages) { - while (true) { - Tuple next = storage.getNext(); - if (next == null) { - break; - } - count++; - } - } - assertEquals(2, count); - ryaDAO.destroy(); - } - - public void testContext() throws Exception { - ryaDAO.add(new RyaStatement(new RyaURI(namespace, "a"), new RyaURI(namespace, "p"), new RyaType("l1"))); - ryaDAO.add(new RyaStatement(new RyaURI(namespace, "a"), new RyaURI(namespace, "p"), new RyaType("l2"), new RyaURI(namespace, "g1"))); - - - int count = 0; - List storages = createStorages("accumulo://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&mock=true"); - for (StatementPatternStorage storage : 
storages) { - while (true) { - Tuple next = storage.getNext(); - if (next == null) { - break; - } - count++; - } - } - assertEquals(2, count); - - count = 0; - storages = createStorages("accumulo://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&context=<"+namespace+"g1>&mock=true"); - for (StatementPatternStorage storage : storages) { - while (true) { - Tuple next = storage.getNext(); - if (next == null) { - break; - } - count++; - } - } - assertEquals(1, count); - - ryaDAO.destroy(); - } - - protected List createStorages(String location) throws IOException, InterruptedException { - List storages = new ArrayList(); - StatementPatternStorage storage = new StatementPatternStorage(); - InputFormat inputFormat = storage.getInputFormat(); - Job job = new Job(new Configuration()); - storage.setLocation(location, job); - List splits = inputFormat.getSplits(job); - assertNotNull(splits); - - for (InputSplit inputSplit : splits) { - storage = new StatementPatternStorage(); - job = new Job(new Configuration()); - storage.setLocation(location, job); - TaskAttemptContext taskAttemptContext = new TaskAttemptContextImpl(job.getConfiguration(), - new TaskAttemptID("jtid", 0, false, 0, 0)); - RecordReader recordReader = inputFormat.createRecordReader(inputSplit, - taskAttemptContext); - recordReader.initialize(inputSplit, taskAttemptContext); - - storage.prepareToRead(recordReader, null); - storages.add(storage); - } - return storages; - } - -} diff --git a/pig/accumulo.pig/src/test/resources/ResultsFile1.txt b/pig/accumulo.pig/src/test/resources/ResultsFile1.txt deleted file mode 100644 index e1dcc9298..000000000 --- a/pig/accumulo.pig/src/test/resources/ResultsFile1.txt +++ /dev/null @@ -1,8 +0,0 @@ -person1 person2 person3 -person2 person3 person4 -person3 person4 person5 -person4 person5 person6 -person5 person6 person7 -person6 person7 person8 -person7 person8 person9 -person8 person9 person10 diff --git a/pig/accumulo.pig/src/test/resources/testQuery.txt b/pig/accumulo.pig/src/test/resources/testQuery.txt deleted file mode 100644 index 8f5d02259..000000000 --- a/pig/accumulo.pig/src/test/resources/testQuery.txt +++ /dev/null @@ -1,7 +0,0 @@ -#prefix z, y, x -#prefix y, x, z -#prefix z, x, y -SELECT ?x ?y ?z WHERE { -?x ?y. -?y ?z. -} \ No newline at end of file diff --git a/pig/accumulo.pig/src/test/resources/testQuery2.txt b/pig/accumulo.pig/src/test/resources/testQuery2.txt deleted file mode 100644 index 476b39c63..000000000 --- a/pig/accumulo.pig/src/test/resources/testQuery2.txt +++ /dev/null @@ -1,4 +0,0 @@ -SELECT ?x ?y ?z WHERE { -?x ?y. -?y ?z. 
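// --------------------------------------------------------------------
// createStorages(String) in the deleted test above shows the whole
// configuration surface of StatementPatternStorage: it is a Pig LoadFunc
// whose location URI carries both the connection details and the
// statement pattern. A sketch of assembling such a location from the
// test fixtures (the context part is optional; mock=true is test-only):
String location = "accumulo://" + tablePrefix
        + "?instance=" + instance
        + "&user=" + user
        + "&password=" + pwd
        + "&predicate=<" + namespace + "p>"    // fix the predicate position
        + "&context=<" + namespace + "g1>"     // restrict to one named graph
        + "&mock=true";                        // use a MockInstance
// --------------------------------------------------------------------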
-} \ No newline at end of file diff --git a/pig/pom.xml b/pig/pom.xml deleted file mode 100644 index 2df2d1ce6..000000000 --- a/pig/pom.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya-project - 3.2.10-SNAPSHOT - - - rya.pig - Apache Rya Pig Projects - - pom - - - accumulo.pig - - diff --git a/sail/pom.xml b/sail/pom.xml deleted file mode 100644 index dfd281156..000000000 --- a/sail/pom.xml +++ /dev/null @@ -1,98 +0,0 @@ - - - - - 4.0.0 - - org.apache.rya - rya-project - 3.2.10-SNAPSHOT - - - rya.sail - Apache Rya SAIL - - - - org.apache.rya - rya.api - - - org.apache.rya - rya.provenance - - - org.apache.rya - rya.prospector - - - - net.sf.ehcache - ehcache-core - - - - org.apache.hadoop - hadoop-common - - - - com.tinkerpop.blueprints - blueprints-core - - - - org.openrdf.sesame - sesame-runtime - - - - - org.mockito - mockito-all - test - - - junit - junit - test - - - org.apache.rya - accumulo.rya - test - - - - - - - - org.apache.rat - apache-rat-plugin - - - **/resources/META-INF/org.openrdf.store.schemas - - - - - - diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java deleted file mode 100644 index 4fcc72655..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java +++ /dev/null @@ -1,179 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.namespace.NamespaceManager; -import mvm.rya.rdftriplestore.provenance.ProvenanceCollector; - -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.sail.SailConnection; -import org.openrdf.sail.SailException; -import org.openrdf.sail.helpers.SailBase; - -import static com.google.common.base.Preconditions.checkNotNull; - -public class RdfCloudTripleStore extends SailBase { - - private RdfCloudTripleStoreConfiguration conf; - - protected RyaDAO ryaDAO; - protected InferenceEngine inferenceEngine; - protected RdfEvalStatsDAO rdfEvalStatsDAO; - protected SelectivityEvalDAO selectEvalDAO; - private NamespaceManager namespaceManager; - protected ProvenanceCollector provenanceCollector; - - private ValueFactory vf = new ValueFactoryImpl(); - - @Override - protected SailConnection getConnectionInternal() throws SailException { - return new RdfCloudTripleStoreConnection(this, conf, vf); - } - - @Override - protected void initializeInternal() throws SailException { - checkNotNull(ryaDAO); - - if (this.conf == null) { - this.conf = ryaDAO.getConf(); - } - - checkNotNull(this.conf); - - try { - if (!ryaDAO.isInitialized()) { - ryaDAO.setConf(this.conf); - ryaDAO.init(); - } - } catch (RyaDAOException e) { - throw new SailException(e); - } - - if (rdfEvalStatsDAO != null && !rdfEvalStatsDAO.isInitialized()) { - rdfEvalStatsDAO.setConf(this.conf); - rdfEvalStatsDAO.init(); - } - - //TODO: Support inferencing with ryadao -// if (inferenceEngine != null && !inferenceEngine.isInitialized()) { -// inferenceEngine.setConf(this.conf); -// inferenceEngine.setRyaDAO(ryaDAO); -// inferenceEngine.init(); -// } - - if (namespaceManager == null) { - this.namespaceManager = new NamespaceManager(ryaDAO, this.conf); - } - } - - @Override - protected void shutDownInternal() throws SailException { - try { - if (namespaceManager != null) { - namespaceManager.shutdown(); - } - if (inferenceEngine != null) { - inferenceEngine.destroy(); - } - if (rdfEvalStatsDAO != null) { - rdfEvalStatsDAO.destroy(); - } - ryaDAO.destroy(); - } catch (Exception e) { - throw new SailException(e); - } - } - - @Override - public ValueFactory getValueFactory() { - return vf; - } - - @Override - public boolean isWritable() throws SailException { - return true; - } - - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } - - public void setConf(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public RdfEvalStatsDAO getRdfEvalStatsDAO() { - return rdfEvalStatsDAO; - } - - public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) { - this.rdfEvalStatsDAO = rdfEvalStatsDAO; - } - - public SelectivityEvalDAO getSelectEvalDAO() { - return selectEvalDAO; - } - - public void setSelectEvalDAO(SelectivityEvalDAO selectEvalDAO) { - this.selectEvalDAO = selectEvalDAO; - } - - public RyaDAO getRyaDAO() { - return ryaDAO; - } - - public void setRyaDAO(RyaDAO ryaDAO) { - this.ryaDAO = ryaDAO; - } - - public InferenceEngine getInferenceEngine() { - return inferenceEngine; - } - - public void setInferenceEngine(InferenceEngine inferenceEngine) { - this.inferenceEngine = inferenceEngine; - } - - public NamespaceManager 
getNamespaceManager() { - return namespaceManager; - } - - public void setNamespaceManager(NamespaceManager namespaceManager) { - this.namespaceManager = namespaceManager; - } - - public ProvenanceCollector getProvenanceCollector() { - return provenanceCollector; - } - - public void setProvenanceCollector(ProvenanceCollector provenanceCollector) { - this.provenanceCollector = provenanceCollector; - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java deleted file mode 100644 index 24ec1097b..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java +++ /dev/null @@ -1,623 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import info.aduna.iteration.CloseableIteration; - -import java.lang.reflect.Constructor; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NoSuchElementException; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.api.persist.utils.RyaDAOHelper; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.rdftriplestore.evaluation.FilterRangeVisitor; -import mvm.rya.rdftriplestore.evaluation.ParallelEvaluationStrategyImpl; -import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer; -import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics; -import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics; -import mvm.rya.rdftriplestore.evaluation.SeparateFilterJoinsVisitor; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.inference.InverseOfVisitor; -import mvm.rya.rdftriplestore.inference.SameAsVisitor; -import mvm.rya.rdftriplestore.inference.SubClassOfVisitor; -import mvm.rya.rdftriplestore.inference.SubPropertyOfVisitor; -import mvm.rya.rdftriplestore.inference.SymmetricPropertyVisitor; -import mvm.rya.rdftriplestore.inference.TransitivePropertyVisitor; -import mvm.rya.rdftriplestore.namespace.NamespaceManager; -import mvm.rya.rdftriplestore.provenance.ProvenanceCollectionException; -import mvm.rya.rdftriplestore.provenance.ProvenanceCollector; -import 
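// --------------------------------------------------------------------
// Taken together, RdfCloudTripleStore's setters above are its entire
// wiring surface; initializeInternal() makes only the RyaDAO mandatory
// and falls back to the DAO's configuration. A minimal sketch of
// standing the SAIL up over Accumulo, reusing classes removed elsewhere
// in this patch (connector is assumed to be an existing Accumulo
// Connector):
AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
conf.setTablePrefix("rya_");

AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO();
ryaDAO.setConnector(connector);
ryaDAO.setConf(conf);                 // the store calls init() if needed

RdfCloudTripleStore store = new RdfCloudTripleStore();
store.setRyaDAO(ryaDAO);              // the only required collaborator

RyaSailRepository repository = new RyaSailRepository(store);  // defined later in this patch
repository.initialize();              // triggers initializeInternal()
// --------------------------------------------------------------------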
mvm.rya.rdftriplestore.utils.DefaultStatistics; - -import org.apache.hadoop.conf.Configurable; -import org.openrdf.model.Namespace; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ContextStatementImpl; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.query.Binding; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.EvaluationStrategy; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.evaluation.TripleSource; -import org.openrdf.query.algebra.evaluation.impl.BindingAssigner; -import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer; -import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter; -import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer; -import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; -import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer; -import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer; -import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer; -import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer; -import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer; -import org.openrdf.query.impl.EmptyBindingSet; -import org.openrdf.sail.SailException; -import org.openrdf.sail.helpers.SailConnectionBase; - -public class RdfCloudTripleStoreConnection extends SailConnectionBase { - - private RdfCloudTripleStore store; - - private RdfEvalStatsDAO rdfEvalStatsDAO; - private SelectivityEvalDAO selectEvalDAO; - private RyaDAO ryaDAO; - private InferenceEngine inferenceEngine; - private NamespaceManager namespaceManager; - private RdfCloudTripleStoreConfiguration conf; - - - private ProvenanceCollector provenanceCollector; - - public RdfCloudTripleStoreConnection(RdfCloudTripleStore sailBase, RdfCloudTripleStoreConfiguration conf, ValueFactory vf) - throws SailException { - super(sailBase); - this.store = sailBase; - this.conf = conf; - initialize(); - } - - protected void initialize() throws SailException { - refreshConnection(); - } - - protected void refreshConnection() throws SailException { - try { - checkNotNull(store.getRyaDAO()); - checkArgument(store.getRyaDAO().isInitialized()); - checkNotNull(store.getNamespaceManager()); - - this.ryaDAO = store.getRyaDAO(); - this.rdfEvalStatsDAO = store.getRdfEvalStatsDAO(); - this.selectEvalDAO = store.getSelectEvalDAO(); - this.inferenceEngine = store.getInferenceEngine(); - this.namespaceManager = store.getNamespaceManager(); - this.provenanceCollector = store.getProvenanceCollector(); - - } catch (Exception e) { - throw new SailException(e); - } - } - - @Override - protected void addStatementInternal(Resource subject, URI predicate, - Value object, Resource... contexts) throws SailException { - try { - String cv_s = conf.getCv(); - byte[] cv = cv_s == null ? 
null : cv_s.getBytes(); - if (contexts != null && contexts.length > 0) { - for (Resource context : contexts) { - RyaStatement statement = new RyaStatement( - RdfToRyaConversions.convertResource(subject), - RdfToRyaConversions.convertURI(predicate), - RdfToRyaConversions.convertValue(object), - RdfToRyaConversions.convertResource(context), - null, cv); - - ryaDAO.add(statement); - } - } else { - RyaStatement statement = new RyaStatement( - RdfToRyaConversions.convertResource(subject), - RdfToRyaConversions.convertURI(predicate), - RdfToRyaConversions.convertValue(object), - null, null, cv); - - ryaDAO.add(statement); - } - } catch (RyaDAOException e) { - throw new SailException(e); - } - } - - - - - @Override - protected void clearInternal(Resource... aresource) throws SailException { - try { - RyaURI[] graphs = new RyaURI[aresource.length]; - for (int i = 0 ; i < graphs.length ; i++){ - graphs[i] = RdfToRyaConversions.convertResource(aresource[i]); - } - ryaDAO.dropGraph(conf, graphs); - } catch (RyaDAOException e) { - throw new SailException(e); - } - } - - @Override - protected void clearNamespacesInternal() throws SailException { - logger.error("Clear Namespace Repository method not implemented"); - } - - @Override - protected void closeInternal() throws SailException { - verifyIsOpen(); - } - - @Override - protected void commitInternal() throws SailException { - verifyIsOpen(); - //There is no transactional layer - } - - @Override - protected CloseableIteration evaluateInternal( - TupleExpr tupleExpr, Dataset dataset, BindingSet bindings, - boolean flag) throws SailException { - verifyIsOpen(); - logger.trace("Incoming query model:\n{}", tupleExpr.toString()); - if (provenanceCollector != null){ - try { - provenanceCollector.recordQuery(tupleExpr.toString()); - } catch (ProvenanceCollectionException e) { - // TODO silent fail - e.printStackTrace(); - } - } - tupleExpr = tupleExpr.clone(); - - RdfCloudTripleStoreConfiguration queryConf = store.getConf().clone(); - if (bindings != null) { - Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG); - if (dispPlan != null) { - queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue())); - } - - Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH); - if (authBinding != null) { - queryConf.setAuths(authBinding.getValue().stringValue().split(",")); - } - - Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL); - if (ttlBinding != null) { - queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue())); - } - - Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME); - if (startTimeBinding != null) { - queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue())); - } - - Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT); - if (performantBinding != null) { - queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue())); - } - - Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER); - if (inferBinding != null) { - queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue())); - } - - Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS); - if (useStatsBinding != null) { - 
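// --------------------------------------------------------------------
// The chain of getBinding(...) checks above and below treats well-known
// binding names as out-of-band query options, copied onto a cloned
// per-query configuration. A caller can therefore tune a single query
// through ordinary Sesame bindings, roughly like this (conn and vf are
// an open RepositoryConnection and its ValueFactory; sparql is any
// SELECT query string):
TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
query.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("auth1,auth2"));
query.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, vf.createLiteral(true));
query.setBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT, vf.createLiteral(100L));
TupleQueryResult result = query.evaluate();   // evaluated under the per-query options
// --------------------------------------------------------------------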
queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue())); - } - - Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET); - if (offsetBinding != null) { - queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue())); - } - - Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT); - if (limitBinding != null) { - queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue())); - } - } else { - bindings = new QueryBindingSet(); - } - - if (!(tupleExpr instanceof QueryRoot)) { - tupleExpr = new QueryRoot(tupleExpr); - } - - try { - List> optimizers = queryConf.getOptimizers(); - Class pcjOptimizer = queryConf.getPcjOptimizer(); - - if(pcjOptimizer != null) { - QueryOptimizer opt = null; - try { - Constructor construct = pcjOptimizer.getDeclaredConstructor(new Class[] {}); - opt = construct.newInstance(); - } catch (Exception e) { - } - if (opt == null) { - throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName()); - } - if (opt instanceof Configurable) { - ((Configurable) opt).setConf(conf); - } - opt.optimize(tupleExpr, dataset, bindings); - } - - final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl( - new StoreTripleSource(queryConf), inferenceEngine, dataset, queryConf); - - (new BindingAssigner()).optimize(tupleExpr, dataset, bindings); - (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, - bindings); - (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings); - (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, - bindings); - (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, - bindings); - (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, - bindings); - (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings); - - (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, - bindings); - - if (!optimizers.isEmpty()) { - for (Class optclz : optimizers) { - QueryOptimizer result = null; - try { - Constructor meth = optclz.getDeclaredConstructor(new Class[] {}); - result = meth.newInstance(); - } catch (Exception e) { - } - try { - Constructor meth = optclz.getDeclaredConstructor(EvaluationStrategy.class); - result = meth.newInstance(strategy); - } catch (Exception e) { - } - if (result == null) { - throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName()); - } - if (result instanceof Configurable) { - ((Configurable) result).setConf(conf); - } - result.optimize(tupleExpr, dataset, bindings); - } - } - - (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings); - (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings); - - logger.trace("Optimized query model:\n{}", tupleExpr.toString()); - - if (queryConf.isInfer() - && this.inferenceEngine != null - ) { - try { - tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine)); - tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine)); - tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine)); - tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine)); - tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine)); - tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine)); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (queryConf.isPerformant()) { - tupleExpr.visit(new SeparateFilterJoinsVisitor()); -// tupleExpr.visit(new 
FilterTimeIndexVisitor(queryConf)); -// tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf)); - } - FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf); - tupleExpr.visit(rangeVisitor); - tupleExpr.visit(rangeVisitor); //this has to be done twice to replace the StatementPatterns with the right ranges - EvaluationStatistics stats = null; - if (!queryConf.isUseStats() && queryConf.isPerformant() || rdfEvalStatsDAO == null) { - stats = new DefaultStatistics(); - } else if (queryConf.isUseStats()) { - - if (queryConf.isUseSelectivity()) { - stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics(queryConf, rdfEvalStatsDAO, - selectEvalDAO); - } else { - stats = new RdfCloudTripleStoreEvaluationStatistics(queryConf, rdfEvalStatsDAO); - } - } - if (stats != null) { - - if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) { - - (new QueryJoinSelectOptimizer((RdfCloudTripleStoreSelectivityEvaluationStatistics) stats, - selectEvalDAO)).optimize(tupleExpr, dataset, bindings); - } else { - - (new mvm.rya.rdftriplestore.evaluation.QueryJoinOptimizer(stats)).optimize(tupleExpr, dataset, - bindings); // TODO: Make pluggable - } - } - - final CloseableIteration iter = strategy - .evaluate(tupleExpr, EmptyBindingSet.getInstance()); - CloseableIteration iterWrap = new CloseableIteration() { - - @Override - public void remove() throws QueryEvaluationException { - iter.remove(); - } - - @Override - public BindingSet next() throws QueryEvaluationException { - return iter.next(); - } - - @Override - public boolean hasNext() throws QueryEvaluationException { - return iter.hasNext(); - } - - @Override - public void close() throws QueryEvaluationException { - iter.close(); - strategy.shutdown(); - } - }; - return iterWrap; - } catch (QueryEvaluationException e) { - throw new SailException(e); - } catch (Exception e) { - throw new SailException(e); - } - } - - @Override - protected CloseableIteration getContextIDsInternal() - throws SailException { - verifyIsOpen(); - - // iterate through all contextids - return null; - } - - @Override - protected String getNamespaceInternal(String s) throws SailException { - return namespaceManager.getNamespace(s); - } - - @Override - protected CloseableIteration getNamespacesInternal() - throws SailException { - return namespaceManager.iterateNamespace(); - } - - @Override - protected CloseableIteration getStatementsInternal( - Resource subject, URI predicate, Value object, boolean flag, - Resource... contexts) throws SailException { -// try { - //have to do this to get the inferred values - //TODO: Will this method reduce performance? - final Var subjVar = decorateValue(subject, "s"); - final Var predVar = decorateValue(predicate, "p"); - final Var objVar = decorateValue(object, "o"); - StatementPattern sp = null; - final boolean hasContext = contexts != null && contexts.length > 0; - final Resource context = (hasContext) ? 
contexts[0] : null; - final Var cntxtVar = decorateValue(context, "c"); - //TODO: Only using one context here - sp = new StatementPattern(subjVar, predVar, objVar, cntxtVar); - //return new StoreTripleSource(store.getConf()).getStatements(resource, uri, value, contexts); - final CloseableIteration evaluate = evaluate(sp, null, null, false); - return new CloseableIteration() { //TODO: Use a util class to do this - private boolean isClosed = false; - - @Override - public void close() throws SailException { - isClosed = true; - try { - evaluate.close(); - } catch (QueryEvaluationException e) { - throw new SailException(e); - } - } - - @Override - public boolean hasNext() throws SailException { - try { - return evaluate.hasNext(); - } catch (QueryEvaluationException e) { - throw new SailException(e); - } - } - - @Override - public Statement next() throws SailException { - if (!hasNext() || isClosed) { - throw new NoSuchElementException(); - } - - try { - BindingSet next = evaluate.next(); - Resource bs_subj = (Resource) ((subjVar.hasValue()) ? subjVar.getValue() : next.getBinding(subjVar.getName()).getValue()); - URI bs_pred = (URI) ((predVar.hasValue()) ? predVar.getValue() : next.getBinding(predVar.getName()).getValue()); - Value bs_obj = (objVar.hasValue()) ? objVar.getValue() : (Value) next.getBinding(objVar.getName()).getValue(); - Binding b_cntxt = next.getBinding(cntxtVar.getName()); - - //convert BindingSet to Statement - if (b_cntxt != null) { - return new ContextStatementImpl(bs_subj, bs_pred, bs_obj, (Resource) b_cntxt.getValue()); - } else { - return new StatementImpl(bs_subj, bs_pred, bs_obj); - } - } catch (QueryEvaluationException e) { - throw new SailException(e); - } - } - - @Override - public void remove() throws SailException { - try { - evaluate.remove(); - } catch (QueryEvaluationException e) { - throw new SailException(e); - } - } - }; -// } catch (QueryEvaluationException e) { -// throw new SailException(e); -// } - } - - protected Var decorateValue(Value val, String name) { - if (val == null) { - return new Var(name); - } else { - return new Var(name, val); - } - } - - @Override - protected void removeNamespaceInternal(String s) throws SailException { - namespaceManager.removeNamespace(s); - } - - @Override - protected void removeStatementsInternal(Resource subject, URI predicate, - Value object, Resource... 
contexts) throws SailException { - if (!(subject instanceof URI)) { - throw new SailException("Subject[" + subject + "] must be URI"); - } - - try { - if (contexts != null && contexts.length > 0) { - for (Resource context : contexts) { - if (!(context instanceof URI)) { - throw new SailException("Context[" + context + "] must be URI"); - } - RyaStatement statement = new RyaStatement( - RdfToRyaConversions.convertResource(subject), - RdfToRyaConversions.convertURI(predicate), - RdfToRyaConversions.convertValue(object), - RdfToRyaConversions.convertResource(context)); - - ryaDAO.delete(statement, conf); - } - } else { - RyaStatement statement = new RyaStatement( - RdfToRyaConversions.convertResource(subject), - RdfToRyaConversions.convertURI(predicate), - RdfToRyaConversions.convertValue(object), - null); - - ryaDAO.delete(statement, conf); - } - } catch (RyaDAOException e) { - throw new SailException(e); - } - } - - @Override - protected void rollbackInternal() throws SailException { - //TODO: No transactional layer as of yet - } - - @Override - protected void setNamespaceInternal(String s, String s1) - throws SailException { - namespaceManager.addNamespace(s, s1); - } - - @Override - protected long sizeInternal(Resource... contexts) throws SailException { - logger.error("Cannot determine size as of yet"); - - return 0; - } - - @Override - protected void startTransactionInternal() throws SailException { - //TODO: ? - } - - public class StoreTripleSource implements TripleSource { - - private RdfCloudTripleStoreConfiguration conf; - - public StoreTripleSource(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public CloseableIteration getStatements( - Resource subject, URI predicate, Value object, - Resource... contexts) throws QueryEvaluationException { - return RyaDAOHelper.query(ryaDAO, subject, predicate, object, conf, contexts); - } - - public CloseableIteration, QueryEvaluationException> getStatements( - Collection> statements, - Resource... contexts) throws QueryEvaluationException { - - return RyaDAOHelper.query(ryaDAO, statements, conf); - } - - public ValueFactory getValueFactory() { - return RdfCloudTripleStoreConstants.VALUE_FACTORY; - } - } - - public InferenceEngine getInferenceEngine() { - return inferenceEngine; - } - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java deleted file mode 100644 index 42f1aa4e8..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java +++ /dev/null @@ -1,56 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import org.openrdf.sail.Sail; -import org.openrdf.sail.config.SailConfigException; -import org.openrdf.sail.config.SailFactory; -import org.openrdf.sail.config.SailImplConfig; - -public class RdfCloudTripleStoreFactory implements SailFactory { - - public static final String SAIL_TYPE = "openrdf:RdfCloudTripleStore"; - - @Override - public SailImplConfig getConfig() { - return new RdfCloudTripleStoreSailConfig(); - } - - @Override - public Sail getSail(SailImplConfig config) throws SailConfigException { -// RdfCloudTripleStore cbStore = new RdfCloudTripleStore(); -// RdfCloudTripleStoreSailConfig cbconfig = (RdfCloudTripleStoreSailConfig) config; -// cbStore.setServer(cbconfig.getServer()); -// cbStore.setPort(cbconfig.getPort()); -// cbStore.setInstance(cbconfig.getInstance()); -// cbStore.setPassword(cbconfig.getPassword()); -// cbStore.setUser(cbconfig.getUser()); -// return cbStore; - return null; //TODO: How? - } - - @Override - public String getSailType() { - return SAIL_TYPE; - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java deleted file mode 100644 index 6542b5503..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java +++ /dev/null @@ -1,133 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import org.openrdf.model.*; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.util.GraphUtil; -import org.openrdf.model.util.GraphUtilException; -import org.openrdf.sail.config.SailConfigException; -import org.openrdf.sail.config.SailImplConfigBase; - -public class RdfCloudTripleStoreSailConfig extends SailImplConfigBase { - - public static final String NAMESPACE = "http://www.openrdf.org/config/sail/cloudbasestore#"; - - public static final URI SERVER; - public static final URI PORT; - public static final URI INSTANCE; - public static final URI USER; - public static final URI PASSWORD; - - static { - ValueFactory factory = ValueFactoryImpl.getInstance(); - SERVER = factory.createURI(NAMESPACE, "server"); - PORT = factory.createURI(NAMESPACE, "port"); - INSTANCE = factory.createURI(NAMESPACE, "instance"); - USER = factory.createURI(NAMESPACE, "user"); - PASSWORD = factory.createURI(NAMESPACE, "password"); - } - - private String server = "stratus13"; - - private int port = 2181; - - private String user = "root"; - - private String password = "password"; - - private String instance = "stratus"; - - public String getServer() { - return server; - } - - public void setServer(String server) { - this.server = server; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - @Override - public void parse(Graph graph, Resource implNode) - throws SailConfigException - { - super.parse(graph, implNode); - System.out.println("parsing"); - - try { - Literal serverLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, SERVER); - if (serverLit != null) { - setServer(serverLit.getLabel()); - } - Literal portLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PORT); - if (portLit != null) { - setPort(Integer.parseInt(portLit.getLabel())); - } - Literal instList = GraphUtil.getOptionalObjectLiteral(graph, implNode, INSTANCE); - if (instList != null) { - setInstance(instList.getLabel()); - } - Literal userLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, USER); - if (userLit != null) { - setUser(userLit.getLabel()); - } - Literal pwdLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PASSWORD); - if (pwdLit != null) { - setPassword(pwdLit.getLabel()); - } - } - catch (GraphUtilException e) { - throw new SailConfigException(e.getMessage(), e); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java deleted file mode 100644 index 700339810..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java +++ /dev/null @@ -1,53 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.sail.Sail; -import org.openrdf.sail.SailException; - -/** - * Created by IntelliJ IDEA. - * User: RoshanP - * Date: 3/23/12 - * Time: 10:05 AM - * To change this template use File | Settings | File Templates. - */ -public class RyaSailRepository extends SailRepository{ - public RyaSailRepository(Sail sail) { - super(sail); - } - - @Override - public SailRepositoryConnection getConnection() throws RepositoryException { - try - { - return new RyaSailRepositoryConnection(this, this.getSail().getConnection()); - } - catch(SailException e) - { - throw new RepositoryException(e); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java deleted file mode 100644 index 6261b8c81..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java +++ /dev/null @@ -1,109 +0,0 @@ -package mvm.rya.rdftriplestore; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.IOException; -import java.io.InputStream; -import java.io.Reader; - -import mvm.rya.rdftriplestore.utils.CombineContextsRdfInserter; - -import org.openrdf.OpenRDFUtil; -import org.openrdf.model.Resource; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; -import org.openrdf.repository.util.RDFLoader; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.sail.SailConnection; - -/** - * The real reason for this is so that we can combine contexts from an input stream/reader and the given contexts in the add function - */ -public class RyaSailRepositoryConnection extends SailRepositoryConnection { - - protected RyaSailRepositoryConnection(SailRepository repository, SailConnection sailConnection) { - super(repository, sailConnection); - } - - @Override - public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... 
contexts) throws IOException, RDFParseException, - RepositoryException { - OpenRDFUtil.verifyContextNotNull(contexts); - - CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this); - rdfInserter.enforceContext(contexts); - - boolean localTransaction = startLocalTransaction(); - try { - RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory()); - loader.load(in, baseURI, dataFormat, rdfInserter); - - conditionalCommit(localTransaction); - } catch (RDFHandlerException e) { - conditionalRollback(localTransaction); - - throw ((RepositoryException) e.getCause()); - } catch (RDFParseException e) { - conditionalRollback(localTransaction); - throw e; - } catch (IOException e) { - conditionalRollback(localTransaction); - throw e; - } catch (RuntimeException e) { - conditionalRollback(localTransaction); - throw e; - } - } - - @Override - public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException, - RepositoryException { - OpenRDFUtil.verifyContextNotNull(contexts); - - CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this); - rdfInserter.enforceContext(contexts); - - boolean localTransaction = startLocalTransaction(); - try { - RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory()); - loader.load(reader, baseURI, dataFormat, rdfInserter); - - conditionalCommit(localTransaction); - } catch (RDFHandlerException e) { - conditionalRollback(localTransaction); - - throw ((RepositoryException) e.getCause()); - } catch (RDFParseException e) { - conditionalRollback(localTransaction); - throw e; - } catch (IOException e) { - conditionalRollback(localTransaction); - throw e; - } catch (RuntimeException e) { - conditionalRollback(localTransaction); - throw e; - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java deleted file mode 100644 index b84104a64..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java +++ /dev/null @@ -1,33 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
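// --------------------------------------------------------------------
// Net effect of the two add(...) overrides above, from the client side:
// as the class comment says, contexts named in the call are combined
// with contexts already present in the parsed data, rather than
// replacing them as a plain RDFInserter would. A sketch (the file name
// and graph URI are illustrative):
RepositoryConnection conn = ryaRepository.getConnection();   // a RyaSailRepositoryConnection
conn.add(new FileInputStream("data.trig"), "urn:base",
        RDFFormat.TRIG,                       // a quad format, so statements carry their own contexts
        vf.createURI("urn:graph:extra"));     // combined with the data's contexts
conn.close();
// --------------------------------------------------------------------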
- */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; - -public interface ExternalBatchingIterator { - public CloseableIteration evaluate(Collection bindingset) throws QueryEvaluationException; -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java deleted file mode 100644 index 16ef58847..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java +++ /dev/null @@ -1,109 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.LookAheadIteration; - -import java.util.ArrayList; -import java.util.Collection; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; - -/** - */ -public class ExternalMultipleBindingSetsIterator extends LookAheadIteration { - - private final ParallelEvaluationStrategyImpl strategy; - private final CloseableIteration leftIter; - private ExternalBatchingIterator stmtPtrn; - private CloseableIteration iter; - //TODO: configurable - private int batchSize = 1000; - - public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, ExternalBatchingIterator stmtPattern, BindingSet bindings) - throws QueryEvaluationException { - this.strategy = strategy; - leftIter = strategy.evaluate(leftArg, bindings); - this.stmtPtrn = stmtPattern; - initIter(); - } - - public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, ExternalBatchingIterator stmtPattern, BindingSet bindings) - throws QueryEvaluationException { - this.strategy = strategy; - this.leftIter = leftIter; - this.stmtPtrn = stmtPattern; - initIter(); - } - - protected void initIter() throws QueryEvaluationException { - try { - Collection sets = new ArrayList(); - int i = 0; - while (leftIter.hasNext()) { - //default to 1K for the batch size - if (i >= batchSize) { - break; - } - sets.add((BindingSet) leftIter.next()); - i++; - } - if (iter != null) iter.close(); - iter = stmtPtrn.evaluate(sets); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } - - protected BindingSet getNextElement() - throws QueryEvaluationException { - try { - while (true) { - if (iter.hasNext()) { - return iter.next(); - } - - if (leftIter.hasNext()) { - initIter(); - } else - return null; 
- } - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } - - protected void handleClose() - throws QueryEvaluationException { - try { - super.handleClose(); - leftIter.close(); - iter.close(); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java deleted file mode 100644 index 24e2527b2..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java +++ /dev/null @@ -1,97 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.domain.RangeURI; -import mvm.rya.api.domain.RangeValue; -import org.openrdf.model.Value; -import org.openrdf.model.impl.BooleanLiteralImpl; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.*; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static mvm.rya.api.RdfCloudTripleStoreConstants.RANGE; - -/** - * Class FilterTimeIndexVisitor - * Date: Apr 11, 2011 - * Time: 10:16:15 PM - */ -public class FilterRangeVisitor extends QueryModelVisitorBase { - - private RdfCloudTripleStoreConfiguration conf; - private Map rangeValues = new HashMap(); - - public FilterRangeVisitor(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - @Override - public void meet(Filter node) throws Exception { - super.meet(node); - - ValueExpr arg = node.getCondition(); - if (arg instanceof FunctionCall) { - FunctionCall fc = (FunctionCall) arg; - if (RANGE.stringValue().equals(fc.getURI())) { - //range(?var, start, end) - List valueExprs = fc.getArgs(); - if (valueExprs.size() != 3) { - throw new QueryEvaluationException("mvm:range must have 3 parameters: variable, start, end"); - } - Var var = (Var) valueExprs.get(0); - ValueConstant startVc = (ValueConstant) valueExprs.get(1); - ValueConstant endVc = (ValueConstant) valueExprs.get(2); - Value start = startVc.getValue(); - Value end = endVc.getValue(); - rangeValues.put(var, new RangeValue(start, end)); - node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE)); - } - } - } - - @Override - public void meet(StatementPattern node) throws Exception { - super.meet(node); - - Var subjectVar = node.getSubjectVar(); - RangeValue subjRange = rangeValues.get(subjectVar); - Var predVar = node.getPredicateVar(); - RangeValue predRange = rangeValues.get(predVar); - Var objVar = node.getObjectVar(); - RangeValue objRange = rangeValues.get(objVar); - if(subjRange != null) { - 
subjectVar.setValue(new RangeURI(subjRange));//Assumes no blank nodes can be ranges - } - if(predRange != null) { - predVar.setValue(new RangeURI(predRange)); - } - if(objRange != null) { - objVar.setValue(objRange); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java deleted file mode 100644 index 01f3d275a..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java +++ /dev/null @@ -1,108 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.LookAheadIteration; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; - -import java.util.ArrayList; -import java.util.Collection; - -/** - */ -public class MultipleBindingSetsIterator extends LookAheadIteration { - - private final ParallelEvaluationStrategyImpl strategy; - private final CloseableIteration leftIter; - private StatementPattern stmtPtrn; - private CloseableIteration iter; - //TODO: configurable - private int batchSize = 1000; - - public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, StatementPattern stmtPattern, BindingSet bindings) - throws QueryEvaluationException { - this.strategy = strategy; - leftIter = strategy.evaluate(leftArg, bindings); - this.stmtPtrn = stmtPattern; - initIter(); - } - - public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, StatementPattern stmtPattern, BindingSet bindings) - throws QueryEvaluationException { - this.strategy = strategy; - this.leftIter = leftIter; - this.stmtPtrn = stmtPattern; - initIter(); - } - - protected void initIter() throws QueryEvaluationException { - try { - Collection sets = new ArrayList(); - int i = 0; - while (leftIter.hasNext()) { - //default to 1K for the batch size - if (i >= batchSize) { - break; - } - sets.add((BindingSet) leftIter.next()); - i++; - } - if (iter != null) iter.close(); - iter = strategy.evaluate(stmtPtrn, sets); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } - - protected BindingSet getNextElement() - throws QueryEvaluationException { - try { - while (true) { - if (iter.hasNext()) { - return iter.next(); - } - - if (leftIter.hasNext()) { - initIter(); - } else - return null; - } - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } - - protected void handleClose() - throws QueryEvaluationException { - try { - 
super.handleClose(); - leftIter.close(); - iter.close(); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java deleted file mode 100644 index 30dc96619..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java +++ /dev/null @@ -1,281 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.ConvertingIteration; -import info.aduna.iteration.EmptyIteration; -import info.aduna.iteration.Iteration; -import info.aduna.iteration.IteratorIteration; -import info.aduna.iteration.LimitIteration; -import info.aduna.iteration.OffsetIteration; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.utils.NullableStatementImpl; -import mvm.rya.rdftriplestore.RdfCloudTripleStoreConnection; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.inference.InferenceEngineException; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import mvm.rya.rdftriplestore.utils.TransitivePropertySP; - -import org.apache.log4j.Logger; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.QueryRoot; -import org.openrdf.query.algebra.Slice; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.ValueExpr; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.QueryBindingSet; -import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl; -import org.openrdf.query.algebra.evaluation.iterator.FilterIterator; -import org.openrdf.query.algebra.evaluation.iterator.JoinIterator; -import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; - -import com.google.common.collect.Lists; - -/** - */ -public class 
ParallelEvaluationStrategyImpl extends EvaluationStrategyImpl { - private static Logger logger = Logger.getLogger(ParallelEvaluationStrategyImpl.class); - - private int numOfThreads = 10; - private boolean performant = true; - private boolean displayQueryPlan = false; - private ExecutorService executorService; - private InferenceEngine inferenceEngine; - - public ParallelEvaluationStrategyImpl(RdfCloudTripleStoreConnection.StoreTripleSource tripleSource, InferenceEngine inferenceEngine, - Dataset dataset, RdfCloudTripleStoreConfiguration conf) { - super(tripleSource, dataset); - Integer nthreads = conf.getNumThreads(); - this.numOfThreads = (nthreads != null) ? nthreads : this.numOfThreads; - Boolean val = conf.isPerformant(); - this.performant = (val != null) ? val : this.performant; - val = conf.isDisplayQueryPlan(); - this.displayQueryPlan = (val != null) ? val : this.displayQueryPlan; - this.executorService = Executors.newFixedThreadPool(this.numOfThreads); - this.inferenceEngine = inferenceEngine; - } - - @Override - public CloseableIteration evaluate(Join join, BindingSet bindings) throws QueryEvaluationException { - if (performant) { - TupleExpr buffer = join.getLeftArg(); - if (join.getRightArg() instanceof StatementPattern) { - TupleExpr stmtPat = join.getRightArg(); -// if(buffer instanceof StatementPattern && !(stmtPat instanceof StatementPattern)){ -// buffer = stmtPat; -// stmtPat = join.getLeftArg(); -// } - - return new MultipleBindingSetsIterator(this, buffer, (StatementPattern) stmtPat, bindings); - } else if (join.getRightArg() instanceof ExternalBatchingIterator) { - TupleExpr stmtPat = join.getRightArg(); - - return new ExternalMultipleBindingSetsIterator(this, buffer, (ExternalBatchingIterator) stmtPat, bindings); - } else if (join.getRightArg() instanceof Filter) { - //add performance for the filter too - Filter filter = (Filter) join.getRightArg(); - TupleExpr filterChild = filter.getArg(); - if (filterChild instanceof StatementPattern) { - return new FilterIterator(filter, new MultipleBindingSetsIterator(this, buffer, (StatementPattern) filterChild, bindings), this); - } else if (filterChild instanceof Join) { - Join filterChildJoin = (Join) filterChild; - TupleExpr fcj_left = filterChildJoin.getLeftArg(); - TupleExpr fcj_right = filterChildJoin.getRightArg(); - //TODO: Should be a better way, maybe reorder the filter? 
- //very particular case filter(join(stmtPat, stmtPat)) - if (fcj_left instanceof StatementPattern && fcj_right instanceof StatementPattern) { - return new FilterIterator(filter, new MultipleBindingSetsIterator(this, new Join(buffer, fcj_left), (StatementPattern) fcj_right, bindings), this); - } - } - //TODO: add a configuration flag for ParallelJoinIterator - return new JoinIterator(this, join, bindings); - } else { - //TODO: add a configuration flag for ParallelJoinIterator - return new JoinIterator(this, join, bindings); - } - } else { - return super.evaluate(join, bindings); - } - } - - @Override - public CloseableIteration evaluate(StatementPattern sp, BindingSet bindings) throws QueryEvaluationException { - //TODO: Wonder if creating a Collection here hurts performance - Set bs = Collections.singleton(bindings); - return this.evaluate(sp, bs); - } - - public CloseableIteration evaluate(final StatementPattern sp, Collection bindings) - throws QueryEvaluationException { - - final Var subjVar = sp.getSubjectVar(); - final Var predVar = sp.getPredicateVar(); - final Var objVar = sp.getObjectVar(); - final Var cntxtVar = sp.getContextVar(); - - List> stmts = new ArrayList>(); - - Iteration, QueryEvaluationException> iter; - if (sp instanceof FixedStatementPattern) { - Collection> coll = Lists.newArrayList(); - for (BindingSet binding : bindings) { - Value subjValue = getVarValue(subjVar, binding); - Value predValue = getVarValue(predVar, binding); - Value objValue = getVarValue(objVar, binding); - Resource contxtValue = (Resource) getVarValue(cntxtVar, binding); - for (Statement st : ((FixedStatementPattern) sp).statements) { - if (!((subjValue != null && !subjValue.equals(st.getSubject())) || - (predValue != null && !predValue.equals(st.getPredicate())) || - (objValue != null && !objValue.equals(st.getObject())))) { - coll.add(new RdfCloudTripleStoreUtils.CustomEntry(st, binding)); - } - } - } - iter = new IteratorIteration(coll.iterator()); - } else if (sp instanceof TransitivePropertySP && - ((subjVar != null && subjVar.getValue() != null) || - (objVar != null && objVar.getValue() != null)) && - sp.getPredicateVar() != null) { - //if this is a transitive prop ref, we need to make sure that either the subj or obj is not null - //TODO: Cannot handle a open ended transitive property where subj and obj are null - //TODO: Should one day handle filling in the subj or obj with bindings and working this - //TODO: a lot of assumptions, and might be a large set returned causing an OME - Set sts = null; - try { - sts = inferenceEngine.findTransitiveProperty((Resource) getVarValue(subjVar), - (URI) getVarValue(predVar), getVarValue(objVar), (Resource) getVarValue(cntxtVar)); - } catch (InferenceEngineException e) { - throw new QueryEvaluationException(e); - } - Collection> coll = new ArrayList(); - for (BindingSet binding : bindings) { - for (Statement st : sts) { - coll.add(new RdfCloudTripleStoreUtils.CustomEntry(st, binding)); - } - } - iter = new IteratorIteration(coll.iterator()); - } else { - for (BindingSet binding : bindings) { - Value subjValue = getVarValue(subjVar, binding); - Value predValue = getVarValue(predVar, binding); - Value objValue = getVarValue(objVar, binding); - Resource contxtValue = (Resource) getVarValue(cntxtVar, binding); - if ((subjValue != null && !(subjValue instanceof Resource)) || - (predValue != null && !(predValue instanceof URI))) { - continue; - } - stmts.add(new RdfCloudTripleStoreUtils.CustomEntry( - new NullableStatementImpl((Resource) subjValue, (URI) 
predValue, objValue, contxtValue), binding)); - } - if (stmts.size() == 0) { - return new EmptyIteration(); - } - - iter = ((RdfCloudTripleStoreConnection.StoreTripleSource) tripleSource).getStatements(stmts); - } - return new ConvertingIteration, BindingSet, QueryEvaluationException>(iter) { - - @Override - protected BindingSet convert(Map.Entry stbs) throws QueryEvaluationException { - Statement st = stbs.getKey(); - BindingSet bs = stbs.getValue(); - QueryBindingSet result = new QueryBindingSet(bs); - if (subjVar != null && !result.hasBinding(subjVar.getName())) { - result.addBinding(subjVar.getName(), st.getSubject()); - } - if (predVar != null && !result.hasBinding(predVar.getName())) { - result.addBinding(predVar.getName(), st.getPredicate()); - } - if (objVar != null && !result.hasBinding(objVar.getName())) { - result.addBinding(objVar.getName(), st.getObject()); - } - if (cntxtVar != null && !result.hasBinding(cntxtVar.getName()) && st.getContext() != null) { - result.addBinding(cntxtVar.getName(), st.getContext()); - } - return result; - } - }; - } - - @Override - public CloseableIteration evaluate(TupleExpr expr, BindingSet bindings) throws QueryEvaluationException { - if (expr instanceof QueryRoot) { - if (displayQueryPlan) { -// System.out.println("Tables: "); -// System.out.println("--SPO: \t" + RdfCloudTripleStoreConstants.TBL_SPO); -// System.out.println("--PO: \t" + RdfCloudTripleStoreConstants.TBL_PO); -// System.out.println("--OSP: \t" + RdfCloudTripleStoreConstants.TBL_OSP); - logger.info("=================== Rya Query ==================="); - for (String str : expr.toString().split("\\r?\\n")) { - logger.info(str); - } - logger.info("================= End Rya Query ================="); - } - } - return super.evaluate(expr, bindings); - } - - public CloseableIteration evaluate(Slice slice, BindingSet bindings) - throws QueryEvaluationException { - CloseableIteration result = evaluate(slice.getArg(), bindings); - if (slice.hasOffset()) { - result = new OffsetIteration(result, slice.getOffset()); - } - if (slice.hasLimit()) { - result = new LimitIteration(result, slice.getLimit()); - } - return result; - } - - protected Value getVarValue(Var var) { - if (var == null) - return null; - else - return var.getValue(); - } - - public void shutdown() { - executorService.shutdownNow(); - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java deleted file mode 100644 index 1d5c98238..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java +++ /dev/null @@ -1,139 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
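The two batching iterators deleted above (MultipleBindingSetsIterator, ExternalMultipleBindingSetsIterator) and the Collection-based evaluate overload share one loop: drain up to batchSize (default 1000) left-hand binding sets, then evaluate the right-hand operand once per batch instead of once per binding. A minimal sketch of that loop, assuming BindingSet is reduced to a type parameter and the right-hand evaluation is stubbed as a callback (both are simplifications for illustration, not the Sesame/Rya API):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;

    // Sketch of the batch-draining pattern shared by the deleted iterators:
    // pull up to batchSize left-hand results, evaluate the right-hand
    // operator once for the whole batch, repeat when the batch is drained.
    // B stands in for org.openrdf.query.BindingSet; BatchEvaluator stands in
    // for strategy.evaluate(stmtPtrn, sets) / stmtPtrn.evaluate(sets).
    final class BatchingJoin<B> {
        interface BatchEvaluator<T> {
            Iterator<T> evaluate(List<T> batch);
        }

        private final Iterator<B> left;
        private final BatchEvaluator<B> right;
        private final int batchSize;
        private Iterator<B> current = Collections.<B>emptyIterator();

        BatchingJoin(Iterator<B> left, BatchEvaluator<B> right, int batchSize) {
            this.left = left;
            this.right = right;
            this.batchSize = batchSize;
        }

        /** Next joined binding set, or null once both sides are exhausted. */
        B next() {
            while (true) {
                if (current.hasNext()) {
                    return current.next();
                }
                if (!left.hasNext()) {
                    return null; // mirrors getNextElement() returning null
                }
                List<B> batch = new ArrayList<B>(batchSize);
                while (left.hasNext() && batch.size() < batchSize) {
                    batch.add(left.next());
                }
                current = right.evaluate(batch); // one right-side scan per batch
            }
        }
    }

The payoff is in the range scans: each call to the callback corresponds to one multi-binding scan against the triple store, rather than one scan per left-hand binding.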
- */ - - - -import info.aduna.iteration.CloseableIteration; -import info.aduna.iteration.LookAheadIteration; - -import java.util.NoSuchElementException; -import java.util.Queue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedBlockingQueue; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.EvaluationStrategy; -import org.openrdf.query.impl.EmptyBindingSet; - -/** - */ -public class ParallelJoinIterator extends LookAheadIteration { - - public static final EmptyBindingSet EMPTY_BINDING_SET = new EmptyBindingSet(); - - private final EvaluationStrategy strategy; - private final Join join; - private final CloseableIteration leftIter; - - private ExecutorService executorService; - private Queue workQueue = new LinkedBlockingQueue(); - private ParallelIteratorWork currentWork; - private int batch; - - public ParallelJoinIterator(EvaluationStrategy strategy, Join join, BindingSet bindings, ExecutorService executorService, int batch) - throws QueryEvaluationException { - this.strategy = strategy; - this.join = join; - leftIter = strategy.evaluate(join.getLeftArg(), bindings); - - this.executorService = executorService; - this.batch = batch; - } - - - @Override - protected BindingSet getNextElement() throws QueryEvaluationException { - - try { - while (leftIter.hasNext() || !workQueue.isEmpty() || currentWork != null) { - if (!workQueue.isEmpty() && currentWork == null) { - currentWork = workQueue.poll(); - } - - if (currentWork != null) { - BindingSet bindingSet = currentWork.queue.poll(); - if (EMPTY_BINDING_SET.equals(bindingSet)) { - currentWork = null; - continue; - } else if (bindingSet == null) { - continue; - } - return bindingSet; - } - - try { - for (int i = 0; i < batch; i++) { - if (leftIter.hasNext()) { - ParallelIteratorWork work = new ParallelIteratorWork((BindingSet) leftIter.next(), join.getRightArg()); - workQueue.add(work); - executorService.execute(work); - } else - break; - } - } catch (NoSuchElementException ignore) { - } - } - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - return null; - } - - @Override - protected void handleClose() throws QueryEvaluationException { - try { - super.handleClose(); - leftIter.close(); -// rightIter.close(); - } catch (Exception e) { - throw new QueryEvaluationException(e); - } - } - - private class ParallelIteratorWork implements Runnable { - - private BindingSet leftBindingSet; - private TupleExpr rightTupleExpr; - public LinkedBlockingQueue queue = new LinkedBlockingQueue(); - - private ParallelIteratorWork(BindingSet leftBindingSet, TupleExpr rightTupleExpr) { - this.leftBindingSet = leftBindingSet; - this.rightTupleExpr = rightTupleExpr; - } - - @Override - public void run() { - try { - CloseableIteration iter = strategy.evaluate(rightTupleExpr, leftBindingSet); - while (iter.hasNext()) { - queue.add(iter.next()); - } - queue.add(EMPTY_BINDING_SET); - iter.close(); - } catch (QueryEvaluationException e) { - throw new RuntimeException(e); - } - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java deleted file mode 100644 index 342f98d2d..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java +++ /dev/null @@ -1,57 +0,0 @@ -package 
mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -/** - * Class ReorderJoinVisitor - * Date: Apr 11, 2011 - * Time: 10:16:15 PM - */ -public class PushJoinDownVisitor extends QueryModelVisitorBase { - @Override - public void meet(Join node) throws Exception { - super.meet(node); - - TupleExpr leftArg = node.getLeftArg(); - TupleExpr rightArg = node.getRightArg(); - - /** - * if join(join(1, 2), join(3,4)) - * should be: - * join(join(join(1,2), 3), 4) - */ - if (leftArg instanceof Join && rightArg instanceof Join) { - Join leftJoin = (Join) leftArg; - Join rightJoin = (Join) rightArg; - TupleExpr right_LeftArg = rightJoin.getLeftArg(); - TupleExpr right_rightArg = rightJoin.getRightArg(); - Join inner = new Join(leftJoin, right_LeftArg); - Join outer = new Join(inner, right_rightArg); - node.replaceWith(outer); - } - - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java deleted file mode 100644 index 940e46e94..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java +++ /dev/null @@ -1,284 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.*; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; -import org.openrdf.query.algebra.helpers.StatementPatternCollector; - -import java.util.*; - -/** - * A query optimizer that re-orders nested Joins. 
- * - * @author Arjohn Kampman - * @author James Leigh - */ -public class QueryJoinOptimizer implements QueryOptimizer { - - protected final EvaluationStatistics statistics; - - public QueryJoinOptimizer() { - this(new EvaluationStatistics()); - } - - public QueryJoinOptimizer(EvaluationStatistics statistics) { - this.statistics = statistics; - } - - /** - * Applies generally applicable optimizations: path expressions are sorted - * from more to less specific. - * - * @param tupleExpr - */ - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - tupleExpr.visit(new JoinVisitor()); - } - - protected class JoinVisitor extends QueryModelVisitorBase { - - Set boundVars = new HashSet(); - - @Override - public void meet(LeftJoin leftJoin) { - leftJoin.getLeftArg().visit(this); - - Set origBoundVars = boundVars; - try { - boundVars = new HashSet(boundVars); - boundVars.addAll(leftJoin.getLeftArg().getBindingNames()); - - leftJoin.getRightArg().visit(this); - } finally { - boundVars = origBoundVars; - } - } - - @Override - public void meet(Join node) { - Set origBoundVars = boundVars; - try { - boundVars = new HashSet(boundVars); - - // Recursively get the join arguments - List joinArgs = getJoinArgs(node, new ArrayList()); - - // Build maps of cardinalities and vars per tuple expression - Map cardinalityMap = new HashMap(); -// Map> varsMap = new HashMap>(); -// Map varCardinalityMap = new HashMap(); - - for (TupleExpr tupleExpr : joinArgs) { - double cardinality = statistics.getCardinality(tupleExpr); -// List statementPatternVars = getStatementPatternVars(tupleExpr); - - cardinalityMap.put(tupleExpr, cardinality); -// varsMap.put(tupleExpr, statementPatternVars); - } - - // Build map of var frequences -// Map varFreqMap = new HashMap(); -// for (List varList : varsMap.values()) { -// getVarFreqMap(varList, varFreqMap); -// } - - // Reorder the (recursive) join arguments to a more optimal sequence - List orderedJoinArgs = new ArrayList(joinArgs.size()); - while (!joinArgs.isEmpty()) { - TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap - ); - if (tupleExpr == null) { - break; - } - - joinArgs.remove(tupleExpr); - orderedJoinArgs.add(tupleExpr); - - // Recursively optimize join arguments - tupleExpr.visit(this); - - boundVars.addAll(tupleExpr.getBindingNames()); - } - - // Build new join hierarchy - // Note: generated hierarchy is right-recursive to help the - // IterativeEvaluationOptimizer to factor out the left-most join - // argument - int i = 0; - TupleExpr replacement = orderedJoinArgs.get(i); - for (i++; i < orderedJoinArgs.size(); i++) { - replacement = new Join(replacement, orderedJoinArgs.get(i)); - } - - // Replace old join hierarchy - node.replaceWith(replacement); - } finally { - boundVars = origBoundVars; - } - } - - protected > L getJoinArgs(TupleExpr tupleExpr, L joinArgs) { - if (tupleExpr instanceof Join) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs); - getJoinArgs(join.getRightArg(), joinArgs); - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - - protected List getStatementPatternVars(TupleExpr tupleExpr) { - List stPatterns = StatementPatternCollector.process(tupleExpr); - List varList = new ArrayList(stPatterns.size() * 4); - for (StatementPattern sp : stPatterns) { - sp.getVars(varList); - } - return varList; - } - - protected > M getVarFreqMap(List varList, M varFreqMap) { - for (Var var : varList) { - Integer freq = varFreqMap.get(var); - freq = (freq == null) ? 
1 : freq + 1; - varFreqMap.put(var, freq); - } - return varFreqMap; - } - - /** - * Selects from a list of tuple expressions the next tuple expression that - * should be evaluated. This method selects the tuple expression with - * highest number of bound variables, preferring variables that have been - * bound in other tuple expressions over variables with a fixed value. - */ - protected TupleExpr selectNextTupleExpr(List expressions, - Map cardinalityMap -// ,Map> varsMap, -// Map varFreqMap, Set boundVars - ) { - double lowestCardinality = Double.MAX_VALUE; - TupleExpr result = expressions.get(0); - - for (TupleExpr tupleExpr : expressions) { - // Calculate a score for this tuple expression -// double cardinality = getTupleExprCardinality(tupleExpr, cardinalityMap, varsMap, varFreqMap, boundVars); - double cardinality = cardinalityMap.get(tupleExpr); -// List vars = varsMap.get(tupleExpr); -// List distinctUnboundVars = getUnboundVars(vars); -// if (distinctUnboundVars.size() >= 2) { -// cardinality *= (distinctUnboundVars.size() + 1); -// } - - if (cardinality < lowestCardinality) { - // More specific path expression found - lowestCardinality = cardinality; - result = tupleExpr; - } - } - - return result; - } - - protected double getTupleExprCardinality(TupleExpr tupleExpr, Map cardinalityMap, - Map> varsMap, Map varFreqMap, Set boundVars) { - double cardinality = cardinalityMap.get(tupleExpr); - - List vars = varsMap.get(tupleExpr); - - // Compensate for variables that are bound earlier in the evaluation - List unboundVars = getUnboundVars(vars); - List constantVars = getConstantVars(vars); - int nonConstantVarCount = vars.size() - constantVars.size(); - if (nonConstantVarCount > 0) { - double exp = (double) unboundVars.size() / nonConstantVarCount; - cardinality = Math.pow(cardinality, exp); - } - - if (unboundVars.isEmpty()) { - // Prefer patterns with more bound vars - if (nonConstantVarCount > 0) { - cardinality /= nonConstantVarCount; - } - } else { - // Prefer patterns that bind variables from other tuple expressions - int foreignVarFreq = getForeignVarFreq(unboundVars, varFreqMap); - if (foreignVarFreq > 0) { - cardinality /= foreignVarFreq; - } - } - - // Prefer patterns that bind more variables - List distinctUnboundVars = getUnboundVars(new - HashSet(vars)); - if (distinctUnboundVars.size() >= 2) { - cardinality /= distinctUnboundVars.size(); - } - - return cardinality; - } - - protected List getConstantVars(Iterable vars) { - List constantVars = new ArrayList(); - - for (Var var : vars) { - if (var.hasValue()) { - constantVars.add(var); - } - } - - return constantVars; - } - - protected List getUnboundVars(Iterable vars) { - List unboundVars = new ArrayList(); - - for (Var var : vars) { - if (!var.hasValue() && !this.boundVars.contains(var.getName())) { - unboundVars.add(var); - } - } - - return unboundVars; - } - - protected int getForeignVarFreq(List ownUnboundVars, Map varFreqMap) { - int result = 0; - - Map ownFreqMap = getVarFreqMap(ownUnboundVars, new HashMap()); - - for (Map.Entry entry : ownFreqMap.entrySet()) { - Var var = entry.getKey(); - int ownFreq = entry.getValue(); - result += varFreqMap.get(var) - ownFreq; - } - - return result; - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java deleted file mode 100644 index 643446a38..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java 
+++ /dev/null @@ -1,260 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.openrdf.query.BindingSet; -import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.QueryOptimizer; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -public class QueryJoinSelectOptimizer implements QueryOptimizer { - - private final EvaluationStatistics statistics; - private final SelectivityEvalDAO eval; - private final RdfCloudTripleStoreConfiguration config; - - public QueryJoinSelectOptimizer(EvaluationStatistics statistics, SelectivityEvalDAO eval) { - System.out.println("Entering join optimizer!"); - this.statistics = statistics; - this.eval = eval; - this.config = eval.getConf(); - } - - /** - * Applies generally applicable optimizations: path expressions are sorted from more to less specific. 
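QueryJoinOptimizer above orders join arguments greedily: selectNextTupleExpr repeatedly picks the remaining argument with the lowest estimated cardinality, and meet(Join) rebuilds a right-recursive join over that order. The selection loop in isolation, with TupleExprs reduced to String labels (an illustration only, not the Sesame types):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Greedy ordering as in QueryJoinOptimizer.meet(Join)/selectNextTupleExpr:
    // always take the remaining argument with the lowest cardinality estimate.
    public class GreedyJoinOrder {
        static List<String> order(List<String> args, Map<String, Double> card) {
            List<String> remaining = new ArrayList<String>(args);
            List<String> ordered = new ArrayList<String>();
            while (!remaining.isEmpty()) {
                String best = remaining.get(0);
                for (String candidate : remaining) {
                    if (card.get(candidate) < card.get(best)) {
                        best = candidate; // lower estimate = more specific pattern
                    }
                }
                remaining.remove(best);
                ordered.add(best); // evaluated earlier in the rebuilt join
            }
            return ordered;
        }

        public static void main(String[] args) {
            Map<String, Double> card = new HashMap<String, Double>();
            card.put("?s rdf:type :Person", 1000000d);  // invented estimates
            card.put("?s :ssn \"123\"", 1d);
            card.put("?s :worksAt ?org", 50000d);
            // prints the most selective pattern first
            System.out.println(order(new ArrayList<String>(card.keySet()), card));
        }
    }

QueryJoinSelectOptimizer below refines this by costing pairs with join selectivity instead of ranking single arguments.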
- * - * @param tupleExpr - */ - public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) { - tupleExpr.visit(new JoinVisitor()); - } - - protected class JoinVisitor extends QueryModelVisitorBase { - - @Override - public void meet(Join node) { - - try { - if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) { - return; - } - - TupleExpr partialQuery = null; - List joinArgs = getJoinArgs(node, new ArrayList()); - Map cardinalityMap = new HashMap(); - - for (TupleExpr tupleExpr : joinArgs) { - double cardinality = statistics.getCardinality(tupleExpr); - cardinalityMap.put(tupleExpr, cardinality); - - } - - while (!joinArgs.isEmpty()) { - TePairCost tpc = getBestTupleJoin(partialQuery, joinArgs); - List tePair = tpc.getTePair(); - if (partialQuery == null) { - if (tePair.size() != 2) { - throw new IllegalStateException(); - } - if (!(tePair.get(0) instanceof Join)) { - tePair.get(0).visit(this); - } - if (!(tePair.get(1) instanceof Join)) { - tePair.get(1).visit(this); - } - if (tePair.get(1) instanceof Join) { - partialQuery = new Join(tePair.get(0), ((Join) tePair.get(1)).getLeftArg()); - partialQuery = new Join(partialQuery, ((Join) tePair.get(1)).getRightArg()); - joinArgs.remove(tePair.get(0)); - joinArgs.remove(tePair.get(1)); - } else { - partialQuery = new Join(tePair.get(0), tePair.get(1)); - joinArgs.remove(tePair.get(0)); - joinArgs.remove(tePair.get(1)); - } - } else { - if (tePair.size() != 1) { - throw new IllegalStateException(); - } - if (!(tePair.get(0) instanceof Join)) { - tePair.get(0).visit(this); - } - - if (tePair.get(0) instanceof Join) { - partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getLeftArg()); - partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getRightArg()); - joinArgs.remove(tePair.get(0)); - - } else { - partialQuery = new Join(partialQuery, tePair.get(0)); - joinArgs.remove(tePair.get(0)); - } - } - - } - - // Replace old join hierarchy - node.replaceWith(partialQuery); - - } catch (Exception e) { - e.printStackTrace(); - } - } - - protected > L getJoinArgs(TupleExpr tupleExpr, L joinArgs) { - if (tupleExpr instanceof Join) { - if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) { - Join join = (Join) tupleExpr; - getJoinArgs(join.getLeftArg(), joinArgs); - getJoinArgs(join.getRightArg(), joinArgs); - } else { - joinArgs.add(tupleExpr); - } - } else { - joinArgs.add(tupleExpr); - } - - return joinArgs; - } - - public TePairCost getBestTupleJoin(TupleExpr partialQuery, List teList) throws Exception { - - double tempCost = 0; - double bestCost = Double.MAX_VALUE; - List bestJoinNodes = new ArrayList(); - - if (partialQuery == null) { - - double jSelect = 0; - double card1 = 0; - double card2 = 0; - TupleExpr teMin1 = null; - TupleExpr teMin2 = null; - double bestCard1 = 0; - double bestCard2 = 0; - - for (int i = 0; i < teList.size(); i++) { - for (int j = i + 1; j < teList.size(); j++) { - jSelect = eval.getJoinSelect(config, teList.get(i), teList.get(j)); - card1 = statistics.getCardinality(teList.get(i)); - card2 = statistics.getCardinality(teList.get(j)); - tempCost = card1 + card2 + card1 * card2 * jSelect; -// System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is " -// + jSelect + ", and nodes are " -// + teList.get(i) + " and " + teList.get(j)); - - // TODO this generates a nullpointer exception if tempCost = 
Double.Max - if (bestCost > tempCost) { - - teMin1 = teList.get(i); - teMin2 = teList.get(j); - bestCard1 = card1; - bestCard2 = card2; - bestCost = tempCost; - - if (bestCost == 0) { - bestJoinNodes.add(teMin1); - bestJoinNodes.add(teMin2); - return new TePairCost(0.0, bestJoinNodes); - } - } - } - } - - if (bestCard1 < bestCard2) { - - bestJoinNodes.add(teMin1); - bestJoinNodes.add(teMin2); - - } else { - bestJoinNodes.add(teMin2); - bestJoinNodes.add(teMin1); - } - //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + jSelect + ", and best cost is" + bestCost); - return new TePairCost(bestCost, bestJoinNodes); - - } else { - double card1 = statistics.getCardinality(partialQuery); - TupleExpr bestTe = null; - double card2 = 0; - double select = 0; - - for (TupleExpr te : teList) { - select = eval.getJoinSelect(config, partialQuery, te); - card2 = statistics.getCardinality(te); - tempCost = card1 + card2 + card1 * card2 * select; -// System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is " -// + select + ", and nodes are " -// + partialQuery + " and " + te); - - - if (bestCost > tempCost) { - bestTe = te; - bestCost = tempCost; - } - - } - List teList2 = new ArrayList(); - teList2.add(bestTe); - //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + select + ", and best cost is" + bestCost); - return new TePairCost(bestCost, teList2); - } - - } - - // ************************************************************************************** - public class TePairCost { - - private double cost; - private List tePair; - - public TePairCost(double cost, List tePair) { - this.cost = cost; - this.tePair = tePair; - - } - - public double getCost() { - return cost; - } - - public List getTePair() { - return tePair; - } - - } - - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java deleted file mode 100644 index b0fa46cd6..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java +++ /dev/null @@ -1,281 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
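The pair cost computed by getBestTupleJoin above is cost = card1 + card2 + card1 * card2 * jSelect: one scan of each input plus the expected size of their join, i.e. the selectivity-scaled cross product. A worked sketch of the formula (numbers invented for illustration). Note the in-code TODO: if both cardinalities are Double.MAX_VALUE the product saturates, no pair ever improves on bestCost, and teMin1/teMin2 remain null, which is the null-pointer case the comment warns about.

    // Pairwise cost from QueryJoinSelectOptimizer.getBestTupleJoin:
    // scan both inputs, then pay for the expected join output.
    public class JoinCost {
        static double cost(double card1, double card2, double joinSelectivity) {
            return card1 + card2 + card1 * card2 * joinSelectivity;
        }

        public static void main(String[] args) {
            System.out.println(cost(1000, 50, 0.001)); // 1000 + 50 + 50 = 1100.0
            System.out.println(cost(1000, 1000, 0.5)); // 2000 + 500000 = 502000.0
            // the optimizer keeps whichever pair scores lower (here the first)
        }
    }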
- */ - - - -import static com.google.common.base.Preconditions.checkNotNull; -//import static RdfCloudTripleStoreUtils.getTtlValueConverter; - - - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.query.algebra.BinaryTupleOperator; -import org.openrdf.query.algebra.Filter; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.Projection; -import org.openrdf.query.algebra.Slice; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.UnaryTupleOperator; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; - -/** - * Class RdfCloudTripleStoreEvaluationStatistics - * Date: Apr 12, 2011 - * Time: 1:31:05 PM - */ -public class RdfCloudTripleStoreEvaluationStatistics extends EvaluationStatistics { - - private RdfCloudTripleStoreConfiguration conf; - private RdfEvalStatsDAO rdfEvalStatsDAO; - protected boolean pushEmptyRdfTypeDown = true; - protected boolean useCompositeCardinalities = true; - - public RdfCloudTripleStoreEvaluationStatistics(RdfCloudTripleStoreConfiguration conf, RdfEvalStatsDAO rdfEvalStatsDAO) { - checkNotNull(conf); - checkNotNull(rdfEvalStatsDAO); - try { - this.conf = conf; - this.rdfEvalStatsDAO = rdfEvalStatsDAO; - pushEmptyRdfTypeDown = conf.isStatsPushEmptyRdftypeDown(); - useCompositeCardinalities = conf.isUseCompositeCardinality(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - public double getCardinality(TupleExpr expr) { - if (expr instanceof Filter) { - Filter f = (Filter) expr; - // filters must make sets smaller - return super.getCardinality(f.getArg()) / 10; - } - return super.getCardinality(expr); - } - - @Override - protected CardinalityCalculator createCardinalityCalculator() { - return new RdfCloudTripleStoreCardinalityCalculator(this); - } - - public RdfEvalStatsDAO getRdfEvalStatsDAO() { - return rdfEvalStatsDAO; - } - - public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) { - this.rdfEvalStatsDAO = rdfEvalStatsDAO; - } - - public class RdfCloudTripleStoreCardinalityCalculator extends CardinalityCalculator { - private RdfCloudTripleStoreEvaluationStatistics statistics; - protected Map> fspMap; - - public RdfCloudTripleStoreCardinalityCalculator(RdfCloudTripleStoreEvaluationStatistics statistics) { - this.statistics = statistics; - } - - - @Override - protected double getCardinality(StatementPattern sp) { - Var subjectVar = sp.getSubjectVar(); - Resource subj = (Resource) getConstantValue(subjectVar); - Var predicateVar = sp.getPredicateVar(); - URI pred = (URI) getConstantValue(predicateVar); - Var objectVar = sp.getObjectVar(); - Value obj = getConstantValue(objectVar); - Resource context = (Resource) getConstantValue(sp.getContextVar()); - - // set rdf type to be a max value (as long as the object/subject aren't specified) to - if (pred != null) { - if (statistics.pushEmptyRdfTypeDown && 
RDF.TYPE.equals(pred) && subj == null && obj == null) { - return Double.MAX_VALUE; - } - } - - // FixedStatementPattern indicates that this is when backward chaining reasoning is being used - if (sp instanceof FixedStatementPattern) { - //no query here - FixedStatementPattern fsp = (FixedStatementPattern) sp; - //TODO: assume that only the subject is open ended here - Var fspSubjectVar = fsp.getSubjectVar(); - if (fspSubjectVar != null && fspSubjectVar.getValue() == null) { - if (fspMap == null) { - fspMap = new HashMap>(); - } - fspMap.put(fspSubjectVar, fsp.statements); - } - return fsp.statements.size(); - } - - /** - * Use the output of the FixedStatementPattern to determine more information - */ - if (fspMap != null && sp instanceof DoNotExpandSP) { - //TODO: Might be a better way than 3 map pulls - RdfEvalStatsDAO.CARDINALITY_OF cardinality_of = null; - Collection statements = null; - // TODO unsure of how to incorporate additional cardinalities here - if (objectVar != null && objectVar.getValue() == null) { - statements = fspMap.get(objectVar); - cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.OBJECT; - } - if (statements == null && predicateVar != null && predicateVar.getValue() == null) { - statements = fspMap.get(predicateVar); - cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE; - } - if (statements == null && subjectVar != null && subjectVar.getValue() == null) { - statements = fspMap.get(subjectVar); - cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT; - } - if (statements != null) { - double fspCard = 0; - for (Statement statement : statements) { - List values = new ArrayList(); - values.add(statement.getSubject()); - fspCard += rdfEvalStatsDAO.getCardinality(conf, cardinality_of, values, context); - } - return fspCard; - } - } - - /** - * We put full triple scans before rdf:type because more often than not - * the triple scan is being joined with something else that is better than - * asking the full rdf:type of everything. 
- */ - double cardinality = Double.MAX_VALUE - 1; - try { - if (subj != null) { - List values = new ArrayList(); - CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT; - values.add(subj); - if (useCompositeCardinalities){ - if (pred != null){ - values.add(pred); - card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTPREDICATE; - } - else if (obj != null){ - values.add(obj); - card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTOBJECT; - } - } - double evalCard = evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context); - // the cardinality will be -1 if there was no value found (if the index does not exist) - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - cardinality = 1; - } - } - else if (pred != null) { - List values = new ArrayList(); - CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE; - values.add(pred); - if (useCompositeCardinalities){ - if (obj != null){ - values.add(obj); - card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT; - } - } - double evalCard = evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context); - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - cardinality = 1; - } - } - else if (obj != null) { - List values = new ArrayList(); - values.add(obj); - double evalCard = rdfEvalStatsDAO.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values, context); - if (evalCard >= 0) { - cardinality = Math.min(cardinality, evalCard); - } else { - cardinality = 1; - } - } - } catch (Exception e) { - throw new RuntimeException(e); - } - - return cardinality; - } - - @Override - protected void meetUnaryTupleOperator(UnaryTupleOperator node) { - if (node instanceof Projection) { - cardinality += -1.0; - } - super.meetUnaryTupleOperator(node); - } - - @Override - protected void meetBinaryTupleOperator(BinaryTupleOperator node) { - node.getLeftArg().visit(this); - double leftArgCost = cardinality; - node.getRightArg().visit(this); - cardinality += leftArgCost; - } - - // TODO Is this sufficient for add capability of slice node? - @Override - public void meet(Slice node) { - cardinality = node.getLimit(); - } - - - @Override - public void meet(Join node) { - node.getLeftArg().visit(this); - double leftArgCost = cardinality; - node.getRightArg().visit(this); - if (leftArgCost > cardinality) { - cardinality = leftArgCost; //TODO: Is this ok? - } - } - - protected Value getConstantValue(Var var) { - if (var != null) - return var.getValue(); - else - return null; - } - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java deleted file mode 100644 index 7c886402d..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java +++ /dev/null @@ -1,128 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static com.google.common.base.Preconditions.checkNotNull; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.api.persist.joinselect.SelectivityEvalDAO; -import mvm.rya.rdftriplestore.inference.DoNotExpandSP; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; - -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.StatementPattern; - -public class RdfCloudTripleStoreSelectivityEvaluationStatistics extends RdfCloudTripleStoreEvaluationStatistics { - - // allows access to join selectivity and extending RdfCloudTripleStoreEvaluationStatistics allows for use of prospector - private SelectivityEvalDAO selectEvalStatsDAO; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains - // RdfEvalStatsDAO object - - protected double filterCard; - RdfCloudTripleStoreConfiguration config; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains conf as well - - public RdfCloudTripleStoreSelectivityEvaluationStatistics(RdfCloudTripleStoreConfiguration conf, - RdfEvalStatsDAO prospector, SelectivityEvalDAO selectEvalStatsDAO) { - - super(conf, prospector); - checkNotNull(selectEvalStatsDAO); - - try { - this.selectEvalStatsDAO = selectEvalStatsDAO; - this.config = conf; // TODO fix this! - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - protected CardinalityCalculator createCardinalityCalculator() { - try { - return new SelectivityCardinalityCalculator(this); - } catch (Exception e) { - System.out.println(e); - throw new RuntimeException(e); - } - } - - public class SelectivityCardinalityCalculator extends RdfCloudTripleStoreCardinalityCalculator { - - public SelectivityCardinalityCalculator(RdfCloudTripleStoreSelectivityEvaluationStatistics statistics) { - super(statistics); - } - - @Override - public void meet(Join node) { - node.getLeftArg().visit(this); - double leftArgCost = cardinality; - // System.out.println("Left cardinality is " + cardinality); - node.getRightArg().visit(this); - - if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) { - return; - } - - try { - double selectivity = selectEvalStatsDAO.getJoinSelect(config, node.getLeftArg(), node.getRightArg()); -// System.out.println("CardCalc: left cost of " + node.getLeftArg() + " is " + leftArgCost + " right cost of " -// + node.getRightArg() + " is " + cardinality); -// System.out.println("Right cardinality is " + cardinality); - cardinality += leftArgCost + leftArgCost * cardinality * selectivity; -// System.out.println("CardCalc: Cardinality is " + cardinality); -// System.out.println("CardCalc: Selectivity is " + selectivity); - // System.out.println("Join cardinality is " + cardinality); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - - - - - @Override - public double getCardinality(StatementPattern node) { - - cardinality = super.getCardinality(node); - - // If sp contains all variables or is EmptyRDFtype, assign - // cardinality - // equal to table size - if (cardinality == 
Double.MAX_VALUE || cardinality == Double.MAX_VALUE - 1) { - try { - cardinality = selectEvalStatsDAO.getTableSize(config); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - return cardinality; - } - - - - - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java deleted file mode 100644 index f825921c1..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java +++ /dev/null @@ -1,70 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -/** - * Class ReorderJoinVisitor - * Date: Apr 11, 2011 - * Time: 10:16:15 PM - */ -public class ReorderJoinVisitor extends QueryModelVisitorBase { - @Override - public void meet(Join node) throws Exception { - super.meet(node); - - TupleExpr leftArg = node.getLeftArg(); - TupleExpr rightArg = node.getRightArg(); - - /** - * if join(stmtPattern1, join(stmtPattern2, anything) - * Should be - * join(join(stmtPattern1, stmtPattern2), anything) - */ - if (leftArg instanceof StatementPattern && rightArg instanceof Join) { - Join rightJoin = (Join) rightArg; - //find the stmtPattern in the right side - TupleExpr right_LeftArg = rightJoin.getLeftArg(); - TupleExpr right_rightArg = rightJoin.getRightArg(); - if (right_LeftArg instanceof StatementPattern || right_rightArg instanceof StatementPattern) { - StatementPattern stmtPattern = null; - TupleExpr anything = null; - if (right_LeftArg instanceof StatementPattern) { - stmtPattern = (StatementPattern) right_LeftArg; - anything = right_rightArg; - } else { - stmtPattern = (StatementPattern) right_rightArg; - anything = right_LeftArg; - } - - Join inner = new Join(leftArg, stmtPattern); - Join outer = new Join(inner, anything); - node.replaceWith(outer); - } - } - - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java deleted file mode 100644 index 002b804bb..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java +++ /dev/null @@ -1,55 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.*; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -/** - * TODO: This might be a very bad thing. It may force all AND and not allow ORs?. Depends on how they do the bindings. - * Class SeparateFilterJoinsVisitor - * Date: Apr 11, 2011 - * Time: 10:16:15 PM - */ -public class SeparateFilterJoinsVisitor extends QueryModelVisitorBase { - @Override - public void meet(Filter node) throws Exception { - super.meet(node); - - ValueExpr condition = node.getCondition(); - TupleExpr arg = node.getArg(); - if (!(arg instanceof Join)) { - return; - } - - Join join = (Join) arg; - TupleExpr leftArg = join.getLeftArg(); - TupleExpr rightArg = join.getRightArg(); - - if (leftArg instanceof StatementPattern && rightArg instanceof StatementPattern) { - Filter left = new Filter(leftArg, condition); - Filter right = new Filter(rightArg, condition); - node.replaceWith(new Join(left, right)); - } - - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java deleted file mode 100644 index f6d3ff068..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java +++ /dev/null @@ -1,108 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
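These rewriters are applied to the algebra tree of a parsed query before evaluation. A minimal usage sketch, assuming Sesame's SPARQLParser and the visitors above are on the classpath; the query string is illustrative only:

// assumes this sketch lives in mvm.rya.rdftriplestore.evaluation
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.parser.ParsedQuery;
import org.openrdf.query.parser.sparql.SPARQLParser;

public class RewritePassSketch {
    public static void main(String[] args) throws Exception {
        SPARQLParser parser = new SPARQLParser();
        ParsedQuery parsed = parser.parseQuery(
                "SELECT ?s WHERE { ?s <urn:p> ?o . ?o <urn:q> ?x . FILTER(?x = 1) }", null);
        TupleExpr expr = parsed.getTupleExpr();
        // each pass mutates the algebra tree in place
        expr.visit(new SeparateFilterJoinsVisitor());
        expr.visit(new ReorderJoinVisitor());
        System.out.println(expr);
    }
}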
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import mvm.rya.rdftriplestore.utils.TransitivePropertySP; -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; - -import static com.google.common.base.Preconditions.checkNotNull; - -/** - * Class AbstractInferVisitor - * Date: Mar 14, 2012 - * Time: 5:33:01 PM - */ -public class AbstractInferVisitor extends QueryModelVisitorBase { - - static Var EXPANDED = new Var("infer-expanded"); - - boolean include = true; - - RdfCloudTripleStoreConfiguration conf; - InferenceEngine inferenceEngine; - - public AbstractInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - checkNotNull(conf, "Configuration cannot be null"); - checkNotNull(inferenceEngine, "Inference Engine cannot be null"); - this.conf = conf; - this.inferenceEngine = inferenceEngine; - } - - @Override - public void meet(StatementPattern sp) throws Exception { - if (!include) { - return; - } - if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) { - return; //already inferred somewhere else - } - final Var predVar = sp.getPredicateVar(); - //do not allow timeRange predicates to be inferred - if (predVar == null || predVar.getValue() == null -// || RdfCloudTripleStoreUtils.getTtlValueConverter(conf, (URI) predVar.getValue()) != null - ) { - return; - } - meetSP(sp); - } - - protected void meetSP(StatementPattern sp) throws Exception { - - } - - @Override - public void meet(Union node) throws Exception { -// if (!(node instanceof InferUnion)) - super.meet(node); - } - - @Override - public void meet(Join node) throws Exception { - if (!(node instanceof InferJoin)) { - super.meet(node); - } - } - - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } - - public void setConf(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public InferenceEngine getInferenceEngine() { - return inferenceEngine; - } - - public void setInferenceEngine(InferenceEngine inferenceEngine) { - this.inferenceEngine = inferenceEngine; - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java deleted file mode 100644 index aed7ed06e..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java +++ /dev/null @@ -1,51 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -/** - * Class DoNotExpandSP - * Date: Mar 15, 2012 - * Time: 9:39:45 AM - */ -public class DoNotExpandSP extends StatementPattern{ - public DoNotExpandSP() { - } - - public DoNotExpandSP(Var subject, Var predicate, Var object) { - super(subject, predicate, object); - } - - public DoNotExpandSP(Scope scope, Var subject, Var predicate, Var object) { - super(scope, subject, predicate, object); - } - - public DoNotExpandSP(Var subject, Var predicate, Var object, Var context) { - super(subject, predicate, object, context); - } - - public DoNotExpandSP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) { - super(scope, subjVar, predVar, objVar, conVar); - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java deleted file mode 100644 index aa0b99b8d..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java +++ /dev/null @@ -1,34 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Interface InferConstants - * Date: Apr 16, 2011 - * Time: 7:30:47 AM - */ -public interface InferConstants { - - public static final String INFERRED = "inferred"; - public static final String TRUE = "true"; - public static final String FALSE = "false"; -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java deleted file mode 100644 index 87854ac26..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java +++ /dev/null @@ -1,50 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
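DoNotExpandSP above, together with the InferJoin defined next and the INFERRED property from InferConstants, is how the inferencer marks algebra nodes it has already rewritten so later passes leave them alone. A small hypothetical sketch of that tagging convention; the helper names are illustrative:

// assumes this sketch lives in mvm.rya.rdftriplestore.inference
import org.openrdf.query.algebra.TupleExpr;

class InferTagSketch {
    static InferJoin tagged(TupleExpr left, TupleExpr right) {
        InferJoin join = new InferJoin(left, right);
        join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
        return join;
    }

    // downstream code can test the tag instead of re-deriving the expansion
    static boolean wasInferred(InferJoin join) {
        return InferConstants.TRUE.equals(join.getProperties().get(InferConstants.INFERRED));
    }
}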
- */ - - - -import org.openrdf.query.algebra.Join; -import org.openrdf.query.algebra.TupleExpr; - -import java.util.HashMap; -import java.util.Map; - -/** - * Class InferJoin - * Date: Apr 16, 2011 - * Time: 7:29:40 AM - */ -public class InferJoin extends Join { - - private Map properties = new HashMap(); - - public InferJoin() { - } - - public InferJoin(TupleExpr leftArg, TupleExpr rightArg) { - super(leftArg, rightArg); - } - - public Map getProperties() { - return properties; - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java deleted file mode 100644 index 4d229d0d9..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java +++ /dev/null @@ -1,48 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.Union; - -import java.util.HashMap; -import java.util.Map; - -/** - * Class InferUnion - * Date: Mar 14, 2012 - * Time: 12:43:49 PM - */ -public class InferUnion extends Union { - private Map properties = new HashMap(); - - public InferUnion() { - } - - public InferUnion(TupleExpr leftArg, TupleExpr rightArg) { - super(leftArg, rightArg); - } - - public Map getProperties() { - return properties; - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java deleted file mode 100644 index f4ed42009..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java +++ /dev/null @@ -1,410 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Graph; -import com.tinkerpop.blueprints.Vertex; -import com.tinkerpop.blueprints.impls.tg.TinkerGraphFactory; -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.utils.RyaDAOHelper; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.QueryEvaluationException; - -import java.util.*; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; - -/** - * Will pull down inference relationships from dao every x seconds.
- * Will infer extra relationships.
- * Will cache relationships in Graph for later use.
- */ -public class InferenceEngine { - - private Graph subClassOfGraph; - private Graph subPropertyOfGraph; - private Set symmetricPropertySet; - private Map inverseOfMap; - private Set transitivePropertySet; - - private RyaDAO ryaDAO; - private RdfCloudTripleStoreConfiguration conf; - private boolean initialized = false; - private boolean schedule = true; - - private long refreshGraphSchedule = 5 * 60 * 1000; //5 min - private Timer timer; - public static final String URI_PROP = "uri"; - - public void init() throws InferenceEngineException { - try { - if (isInitialized()) { - return; - } - - checkNotNull(conf, "Configuration is null"); - checkNotNull(ryaDAO, "RdfDao is null"); - checkArgument(ryaDAO.isInitialized(), "RdfDao is not initialized"); - - if (schedule) { - timer = new Timer(InferenceEngine.class.getName()); - timer.scheduleAtFixedRate(new TimerTask() { - - @Override - public void run() { - try { - refreshGraph(); - } catch (InferenceEngineException e) { - throw new RuntimeException(e); - } - } - - }, refreshGraphSchedule, refreshGraphSchedule); - } - refreshGraph(); - setInitialized(true); - } catch (RyaDAOException e) { - throw new InferenceEngineException(e); - } - } - - public void destroy() throws InferenceEngineException { - setInitialized(false); - if (timer != null) { - timer.cancel(); - } - } - - public void refreshGraph() throws InferenceEngineException { - try { - //get all subclassof - Graph graph = TinkerGraphFactory.createTinkerGraph(); - CloseableIteration iter = RyaDAOHelper.query(ryaDAO, null, - RDFS.SUBCLASSOF, null, conf); - try { - while (iter.hasNext()) { - String edgeName = RDFS.SUBCLASSOF.stringValue(); - Statement st = iter.next(); - addStatementEdge(graph, edgeName, st); - } - } finally { - if (iter != null) { - iter.close(); - } - } - - subClassOfGraph = graph; //TODO: Should this be synchronized? - - graph = TinkerGraphFactory.createTinkerGraph(); - - iter = RyaDAOHelper.query(ryaDAO, null, - RDFS.SUBPROPERTYOF, null, conf); - try { - while (iter.hasNext()) { - String edgeName = RDFS.SUBPROPERTYOF.stringValue(); - Statement st = iter.next(); - addStatementEdge(graph, edgeName, st); - } - } finally { - if (iter != null) { - iter.close(); - } - } - - //equiv property really is the same as a subPropertyOf both ways - iter = RyaDAOHelper.query(ryaDAO, null, OWL.EQUIVALENTPROPERTY, null, conf); - try { - while (iter.hasNext()) { - String edgeName = RDFS.SUBPROPERTYOF.stringValue(); - Statement st = iter.next(); - addStatementEdge(graph, edgeName, st); - //reverse is also true - addStatementEdge(graph, edgeName, new StatementImpl((Resource) st.getObject(), st.getPredicate(), st.getSubject())); - } - } finally { - if (iter != null) { - iter.close(); - } - } - - subPropertyOfGraph = graph; //TODO: Should this be synchronized? - - iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.SYMMETRICPROPERTY, conf); - Set symProp = new HashSet(); - try { - while (iter.hasNext()) { - Statement st = iter.next(); - symProp.add((URI) st.getSubject()); //safe to assume it is a URI? 
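The schema graphs built in refreshGraph() are plain Blueprints graphs. A standalone sketch of the same build-and-walk pattern, using a fresh TinkerGraph rather than TinkerGraphFactory's pre-populated demo graph; the URIs are illustrative. Note the traversal direction: walking IN-edges from a vertex reaches its subclasses, which is exactly what findParents() below exploits when expanding an rdf:type query:

import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;

import java.util.HashSet;
import java.util.Set;

public class SchemaGraphSketch {
    public static void main(String[] args) {
        Graph graph = new TinkerGraph();
        addEdge(graph, "urn:Student", "urn:Person");      // Student subClassOf Person
        addEdge(graph, "urn:GradStudent", "urn:Student"); // GradStudent subClassOf Student

        Set<String> reached = new HashSet<String>();
        collectIncoming(graph.getVertex("urn:Person"), reached);
        System.out.println(reached); // urn:Student and urn:GradStudent
    }

    // same shape as addStatementEdge: subject vertex -> object vertex
    static void addEdge(Graph graph, String child, String parent) {
        Vertex a = graph.getVertex(child);
        if (a == null) { a = graph.addVertex(child); }
        Vertex b = graph.getVertex(parent);
        if (b == null) { b = graph.addVertex(parent); }
        graph.addEdge(null, a, b, "subClassOf");
    }

    // same shape as addParents: follow IN-edges back to their OUT vertex
    static void collectIncoming(Vertex v, Set<String> acc) {
        for (Edge e : v.getEdges(Direction.IN)) {
            Vertex child = e.getVertex(Direction.OUT);
            if (acc.add((String) child.getId())) {
                collectIncoming(child, acc);
            }
        }
    }
}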
- } - } finally { - if (iter != null) { - iter.close(); - } - } - symmetricPropertySet = symProp; - - iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.TRANSITIVEPROPERTY, conf); - Set transProp = new HashSet(); - try { - while (iter.hasNext()) { - Statement st = iter.next(); - transProp.add((URI) st.getSubject()); - } - } finally { - if (iter != null) { - iter.close(); - } - } - transitivePropertySet = transProp; - - iter = RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf); - Map invProp = new HashMap(); - try { - while (iter.hasNext()) { - Statement st = iter.next(); - invProp.put((URI) st.getSubject(), (URI) st.getObject()); - invProp.put((URI) st.getObject(), (URI) st.getSubject()); - } - } finally { - if (iter != null) { - iter.close(); - } - } - inverseOfMap = invProp; - } catch (QueryEvaluationException e) { - throw new InferenceEngineException(e); - } - } - - protected void addStatementEdge(Graph graph, String edgeName, Statement st) { - Resource subj = st.getSubject(); - Vertex a = graph.getVertex(subj); - if (a == null) { - a = graph.addVertex(subj); - a.setProperty(URI_PROP, subj); - } - Value obj = st.getObject(); - Vertex b = graph.getVertex(obj); - if (b == null) { - b = graph.addVertex(obj); - b.setProperty(URI_PROP, obj); - } - graph.addEdge(null, a, b, edgeName); - } - - public Set findParents(Graph graph, URI vertexId) { - Set parents = new HashSet(); - if (graph == null) { - return parents; - } - Vertex v = graph.getVertex(vertexId); - if (v == null) { - return parents; - } - addParents(v, parents); - return parents; - } - - private static void addParents(Vertex v, Set parents) { - for (Edge edge : v.getEdges(Direction.IN)) { - Vertex ov = edge.getVertex(Direction.OUT); - Object o = ov.getProperty(URI_PROP); - if (o != null && o instanceof URI) { - boolean contains = parents.contains(o); - if (!contains) { - parents.add((URI) o); - addParents(ov, parents); - } - } - - } - } - - public boolean isSymmetricProperty(URI prop) { - return (symmetricPropertySet != null) && symmetricPropertySet.contains(prop); - } - - public URI findInverseOf(URI prop) { - return (inverseOfMap != null) ? inverseOfMap.get(prop) : (null); - } - - public boolean isTransitiveProperty(URI prop) { - return (transitivePropertySet != null) && transitivePropertySet.contains(prop); - } - - /** - * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go - */ - public Set findTransitiveProperty(Resource subj, URI prop, Value obj, Resource... contxts) throws InferenceEngineException { - if (transitivePropertySet.contains(prop)) { - Set sts = new HashSet(); - boolean goUp = subj == null; - chainTransitiveProperty(subj, prop, obj, (goUp) ? (obj) : (subj), sts, goUp, contxts); - return sts; - } else - return null; - } - - /** - * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go - */ - public Set findSameAs(Resource value, Resource... contxts) throws InferenceEngineException{ - Set sameAs = new HashSet(); - sameAs.add(value); - findSameAsChaining(value, sameAs, contxts); - return sameAs; - } - - /** - * TODO: This chaining can be slow at query execution. 
the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go - */ - public void findSameAsChaining(Resource subj, Set currentSameAs, Resource[] contxts) throws InferenceEngineException{ - try { - CloseableIteration subjIter = RyaDAOHelper.query(ryaDAO, subj, OWL.SAMEAS, null, conf, contxts); - while (subjIter.hasNext()){ - Statement st = subjIter.next(); - if (!currentSameAs.contains(st.getObject())){ - Resource castedObj = (Resource) st.getObject(); - currentSameAs.add(castedObj); - findSameAsChaining(castedObj, currentSameAs, contxts); - } - } - subjIter.close(); - CloseableIteration objIter = RyaDAOHelper.query(ryaDAO, null, OWL.SAMEAS, subj, conf, contxts); - while (objIter.hasNext()){ - Statement st = objIter.next(); - if (!currentSameAs.contains(st.getSubject())){ - Resource sameAsSubj = st.getSubject(); - currentSameAs.add(sameAsSubj); - findSameAsChaining(sameAsSubj, currentSameAs, contxts); - } - } - objIter.close(); - } catch (QueryEvaluationException e) { - throw new InferenceEngineException(e); - } - - } - - protected void chainTransitiveProperty(Resource subj, URI prop, Value obj, Value core, Set sts, boolean goUp, Resource[] contxts) throws InferenceEngineException { - try { - CloseableIteration iter = RyaDAOHelper.query(ryaDAO, subj, prop, obj, conf, contxts); - while (iter.hasNext()) { - Statement st = iter.next(); - sts.add(new StatementImpl((goUp) ? (st.getSubject()) : (Resource) (core), prop, (!goUp) ? (st.getObject()) : (core))); - if (goUp) { - chainTransitiveProperty(null, prop, st.getSubject(), core, sts, goUp, contxts); - } else { - chainTransitiveProperty((Resource) st.getObject(), prop, null, core, sts, goUp, contxts); - } - } - iter.close(); - } catch (QueryEvaluationException e) { - throw new InferenceEngineException(e); - } - } - - public boolean isInitialized() { - return initialized; - } - - public void setInitialized(boolean initialized) { - this.initialized = initialized; - } - - public RyaDAO getRyaDAO() { - return ryaDAO; - } - - public void setRyaDAO(RyaDAO ryaDAO) { - this.ryaDAO = ryaDAO; - } - - public RdfCloudTripleStoreConfiguration getConf() { - return conf; - } - - public void setConf(RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - } - - public Graph getSubClassOfGraph() { - return subClassOfGraph; - } - - public Graph getSubPropertyOfGraph() { - return subPropertyOfGraph; - } - - public long getRefreshGraphSchedule() { - return refreshGraphSchedule; - } - - public void setRefreshGraphSchedule(long refreshGraphSchedule) { - this.refreshGraphSchedule = refreshGraphSchedule; - } - - public Set getSymmetricPropertySet() { - return symmetricPropertySet; - } - - public void setSymmetricPropertySet(Set symmetricPropertySet) { - this.symmetricPropertySet = symmetricPropertySet; - } - - public Map getInverseOfMap() { - return inverseOfMap; - } - - public void setInverseOfMap(Map inverseOfMap) { - this.inverseOfMap = inverseOfMap; - } - - public Set getTransitivePropertySet() { - return transitivePropertySet; - } - - public void setTransitivePropertySet(Set transitivePropertySet) { - this.transitivePropertySet = transitivePropertySet; - } - - public boolean isSchedule() { - return schedule; - } - - public void setSchedule(boolean schedule) { - this.schedule = schedule; - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java deleted file mode 100644 index 
4fc94e383..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java +++ /dev/null @@ -1,43 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -/** - * Date: 7/20/12 - * Time: 11:03 AM - */ -public class InferenceEngineException extends Exception { - public InferenceEngineException() { - } - - public InferenceEngineException(String s) { - super(s); - } - - public InferenceEngineException(String s, Throwable throwable) { - super(s, throwable); - } - - public InferenceEngineException(Throwable throwable) { - super(throwable); - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java deleted file mode 100644 index 6f8004cd9..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java +++ /dev/null @@ -1,80 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; - -/** - * All predicates are changed - * Class SubPropertyOfVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class InverseOfVisitor extends AbstractInferVisitor { - - public InverseOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferInverseOf(); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - final Var predVar = sp.getPredicateVar(); - - URI pred = (URI) predVar.getValue(); - String predNamespace = pred.getNamespace(); - - final Var objVar = sp.getObjectVar(); - final Var cntxtVar = sp.getContextVar(); - if (objVar != null && - !RDF.NAMESPACE.equals(predNamespace) && - !SESAME.NAMESPACE.equals(predNamespace) && - !RDFS.NAMESPACE.equals(predNamespace) - && !EXPANDED.equals(cntxtVar)) { - /** - * - * { ?a ?pred ?b .}\n" + - " UNION " + - " { ?b ?pred ?a } - */ - - URI predUri = (URI) predVar.getValue(); - URI invPropUri = inferenceEngine.findInverseOf(predUri); - if (invPropUri != null) { - Var subjVar = sp.getSubjectVar(); - Union union = new InferUnion(); - union.setLeftArg(sp); - union.setRightArg(new StatementPattern(objVar, new Var(predVar.getName(), invPropUri), subjVar, cntxtVar)); - node.replaceWith(union); - } - } - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java deleted file mode 100644 index d035026aa..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java +++ /dev/null @@ -1,187 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
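InverseOfVisitor above consults the inverseOf map that refreshGraph() fills in both directions. A small sketch of seeding and querying that map directly, assuming Sesame's ValueFactoryImpl; the URIs are illustrative:

// assumes this sketch lives in (or imports) mvm.rya.rdftriplestore.inference
import org.openrdf.model.URI;
import org.openrdf.model.impl.ValueFactoryImpl;

import java.util.HashMap;
import java.util.Map;

public class InverseOfSketch {
    public static void main(String[] args) {
        URI hasParent = ValueFactoryImpl.getInstance().createURI("urn:hasParent");
        URI hasChild = ValueFactoryImpl.getInstance().createURI("urn:hasChild");

        // stored symmetrically, as refreshGraph() does for owl:inverseOf
        Map<URI, URI> inverses = new HashMap<URI, URI>();
        inverses.put(hasParent, hasChild);
        inverses.put(hasChild, hasParent);

        InferenceEngine engine = new InferenceEngine();
        engine.setInverseOfMap(inverses);
        System.out.println(engine.findInverseOf(hasParent)); // urn:hasChild
    }
}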
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.utils.NullableStatementImpl; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import mvm.rya.rdftriplestore.utils.TransitivePropertySP; -import org.openrdf.model.Resource; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import java.util.HashSet; -import java.util.Set; -import java.util.UUID; - -/** - * All predicates are changed - * Class SubPropertyOfVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class SameAsVisitor extends AbstractInferVisitor { - - public SameAsVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferSubPropertyOf(); // oops - } - - public void meet(StatementPattern sp) throws Exception { - if (!include) { - return; - } - if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) { - return; //already inferred somewhere else - } - final Var predVar = sp.getPredicateVar(); - //do not know when things are null - if (predVar == null) { - return; - } - meetSP(sp); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - final Var predVar = sp.getPredicateVar(); - - boolean shouldExpand = true; - if (predVar.hasValue()){ - URI pred = (URI) predVar.getValue(); - String predNamespace = pred.getNamespace(); - shouldExpand = !pred.equals(OWL.SAMEAS) && - !RDF.NAMESPACE.equals(predNamespace) && - !SESAME.NAMESPACE.equals(predNamespace) && - !RDFS.NAMESPACE.equals(predNamespace); - } - - final Var objVar = sp.getObjectVar(); - final Var subjVar = sp.getSubjectVar(); - final Var cntxtVar = sp.getContextVar(); - if (shouldExpand - && !EXPANDED.equals(cntxtVar) && !(objVar == null) && !(subjVar == null)){ - if (objVar.getValue() == null) { - Value subjVarValue = subjVar.getValue(); - if (subjVarValue instanceof Resource){ - Set uris = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar)); - if (uris.size() > 1){ - InferJoin join = getReplaceJoin(uris, true, subjVar, objVar, predVar, cntxtVar); - node.replaceWith(join); - } - } - } - else if (subjVar.getValue() == null) { - Value objVarValue = objVar.getValue(); - if (objVarValue instanceof Resource){ - Set uris = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar)); - if (uris.size() > 1){ - InferJoin join = getReplaceJoin(uris, false, subjVar, objVar, predVar, cntxtVar); - node.replaceWith(join); - } - } - } - else { - // both subj and pred are set and should be expanded - Set subjURIs = new HashSet(); - Set objURIs = new HashSet(); - // TODO I don't like these checks -- is there a better way to do this? 
- Value objVarValue = objVar.getValue(); - if (objVarValue instanceof Resource){ - objURIs = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar)); - } - Value subjVarValue = subjVar.getValue(); - if (subjVarValue instanceof Resource){ - subjURIs = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar)); - } - InferJoin finalJoin = null; - // expand subj first - if (subjURIs.size() > 1){ - finalJoin = getReplaceJoin(subjURIs, true, subjVar, objVar, predVar, cntxtVar); - } - // now expand the obj - if (objURIs.size() > 1){ - // if we already expanded the subj - if (finalJoin != null){ - // we know what this is since we created it - DoNotExpandSP origStatement = (DoNotExpandSP) finalJoin.getRightArg(); - String s = UUID.randomUUID().toString(); - Var dummyVar = new Var(s); - StatementPattern origDummyStatement = new DoNotExpandSP(origStatement.getSubjectVar(), origStatement.getPredicateVar(), dummyVar, cntxtVar); - FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), objVar, cntxtVar); - for (Resource sameAs : objURIs){ - NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)objVar.getValue(), getVarValue(cntxtVar)); - fsp.statements.add(newStatement); - } - InferJoin interimJoin = new InferJoin(fsp, origDummyStatement); - finalJoin = new InferJoin(finalJoin.getLeftArg(), interimJoin); - } - else { - finalJoin = getReplaceJoin(objURIs, false, subjVar, objVar, predVar, cntxtVar); - } - - } - if (finalJoin != null){ - node.replaceWith(finalJoin); - } - } - } - } - - private InferJoin getReplaceJoin(Set uris, boolean subSubj, Var subjVar, Var objVar, Var predVar, Var cntxtVar){ - String s = UUID.randomUUID().toString(); - Var dummyVar = new Var(s); - StatementPattern origStatement; - Var subVar; - if (subSubj){ - subVar = subjVar; - origStatement = new DoNotExpandSP(dummyVar, predVar, objVar, cntxtVar); - } - else { - subVar = objVar; - origStatement = new DoNotExpandSP(subjVar, predVar, dummyVar, cntxtVar); - } - FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), subVar, cntxtVar); - for (Resource sameAs : uris){ - NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)subVar.getValue(), getVarValue(cntxtVar)); - fsp.statements.add(newStatement); - } - InferJoin join = new InferJoin(fsp, origStatement); - join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); - return join; - } - - protected Resource getVarValue(Var var) { - if (var == null) - return null; - else - return (Resource)var.getValue(); - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java deleted file mode 100644 index 664b5af53..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java +++ /dev/null @@ -1,108 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
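findSameAsChaining computes, in effect, the connected component of a resource under owl:sameAs, following links in both directions until no new members appear. The same fixed-point shape in miniature, detached from the DAO; names and data are illustrative:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class SameAsClosureSketch {
    // sameAs links stored symmetrically, one entry per direction
    static Map<String, Set<String>> links = new HashMap<String, Set<String>>();

    static void link(String a, String b) {
        neighbors(a).add(b);
        neighbors(b).add(a);
    }

    static Set<String> neighbors(String k) {
        Set<String> s = links.get(k);
        if (s == null) { s = new HashSet<String>(); links.put(k, s); }
        return s;
    }

    // seed with the resource itself, then expand to a fixed point
    static Set<String> closure(String seed) {
        Set<String> out = new HashSet<String>();
        Deque<String> work = new ArrayDeque<String>();
        work.push(seed);
        while (!work.isEmpty()) {
            String next = work.pop();
            if (out.add(next)) { work.addAll(neighbors(next)); }
        }
        return out;
    }

    public static void main(String[] args) {
        link("urn:a", "urn:b");
        link("urn:b", "urn:c");
        System.out.println(closure("urn:a")); // urn:a, urn:b, urn:c
    }
}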
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.utils.NullableStatementImpl; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import java.util.Collection; -import java.util.UUID; - -/** - * Class SubClassOfVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class SubClassOfVisitor extends AbstractInferVisitor { - - public SubClassOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferSubClassOf(); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - final Var predVar = sp.getPredicateVar(); - final Var objVar = sp.getObjectVar(); - final Var conVar = sp.getContextVar(); - if (predVar != null && objVar != null && objVar.getValue() != null && RDF.TYPE.equals(predVar.getValue()) - && !EXPANDED.equals(conVar)) { - /** - * ?type sesame:directSubClassOf ub:Student . ?student rdf:type ?type + - */ -// String s = UUID.randomUUID().toString(); -// Var typeVar = new Var(s); -// StatementPattern subClassOf = new StatementPattern(typeVar, new Var("c-" + s, SESAME.DIRECTSUBCLASSOF), objVar, SUBCLASS_EXPANDED); -// StatementPattern rdfType = new StatementPattern(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, SUBCLASS_EXPANDED); -// InferJoin join = new InferJoin(subClassOf, rdfType); -// join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); -// node.replaceWith(join); - - URI subclassof_uri = (URI) objVar.getValue(); - Collection parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), subclassof_uri); - if (parents != null && parents.size() > 0) { - String s = UUID.randomUUID().toString(); - Var typeVar = new Var(s); - FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), objVar, conVar); - fsp.statements.add(new NullableStatementImpl(subclassof_uri, RDFS.SUBCLASSOF, subclassof_uri)); - for (URI u : parents) { - fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBCLASSOF, subclassof_uri)); - } - - StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, conVar); - InferJoin join = new InferJoin(fsp, rdfType); - join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); - node.replaceWith(join); - } - -// if (parents != null && parents.size() > 0) { -// StatementPatterns statementPatterns = new StatementPatterns(); -// statementPatterns.patterns.add(node); -// Var subjVar = node.getSubjectVar(); -// for (URI u : parents) { -// statementPatterns.patterns.add(new StatementPattern(subjVar, predVar, new Var(objVar.getName(), u))); -// } -// node.replaceWith(statementPatterns); -// } - -// if (parents != null && parents.size() > 0) { -// VarCollection vc = new 
VarCollection(); -// vc.setName(objVar.getName()); -// vc.values.add(objVar); -// for (URI u : parents) { -// vc.values.add(new Var(objVar.getName(), u)); -// } -// Var subjVar = node.getSubjectVar(); -// node.replaceWith(new StatementPattern(subjVar, predVar, vc, node.getContextVar())); -// } - } - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java deleted file mode 100644 index 4df45a971..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java +++ /dev/null @@ -1,121 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.utils.NullableStatementImpl; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import mvm.rya.rdftriplestore.utils.FixedStatementPattern; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import java.util.Set; -import java.util.UUID; - -/** - * All predicates are changed - * Class SubPropertyOfVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class SubPropertyOfVisitor extends AbstractInferVisitor { - - public SubPropertyOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferSubPropertyOf(); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - final Var predVar = sp.getPredicateVar(); - - URI pred = (URI) predVar.getValue(); - String predNamespace = pred.getNamespace(); - - final Var objVar = sp.getObjectVar(); - final Var cntxtVar = sp.getContextVar(); - if (objVar != null && - !RDF.NAMESPACE.equals(predNamespace) && - !SESAME.NAMESPACE.equals(predNamespace) && - !RDFS.NAMESPACE.equals(predNamespace) - && !EXPANDED.equals(cntxtVar)) { - /** - * - * { ?subProp rdfs:subPropertyOf ub:worksFor . 
?y ?subProp }\n" + - " UNION " + - " { ?y ub:worksFor } - */ -// String s = UUID.randomUUID().toString(); -// Var subPropVar = new Var(s); -// StatementPattern subPropOf = new StatementPattern(subPropVar, new Var("c-" + s, SESAME.DIRECTSUBPROPERTYOF), predVar, EXPANDED); -// StatementPattern subPropOf2 = new StatementPattern(sp.getSubjectVar(), subPropVar, objVar, EXPANDED); -// InferJoin join = new InferJoin(subPropOf, subPropOf2); -// join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); -// node.replaceWith(join); - -// Collection parents = inferenceEngine.findParents(inferenceEngine.subPropertyOfGraph, (URI) predVar.getValue()); -// if (parents != null && parents.size() > 0) { -// StatementPatterns statementPatterns = new StatementPatterns(); -// statementPatterns.patterns.add(node); -// Var subjVar = node.getSubjectVar(); -// for (URI u : parents) { -// statementPatterns.patterns.add(new StatementPattern(subjVar, new Var(predVar.getName(), u), objVar)); -// } -// node.replaceWith(statementPatterns); -// } -// if (parents != null && parents.size() > 0) { -// VarCollection vc = new VarCollection(); -// vc.setName(predVar.getName()); -// vc.values.add(predVar); -// for (URI u : parents) { -// vc.values.add(new Var(predVar.getName(), u)); -// } -// Var subjVar = node.getSubjectVar(); -// node.replaceWith(new StatementPattern(subjVar, vc, objVar, node.getContextVar())); -// } - - URI subprop_uri = (URI) predVar.getValue(); - Set parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), subprop_uri); - if (parents != null && parents.size() > 0) { - String s = UUID.randomUUID().toString(); - Var typeVar = new Var(s); - FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBPROPERTYOF), predVar, cntxtVar); -// fsp.statements.add(new NullableStatementImpl(subprop_uri, RDFS.SUBPROPERTYOF, subprop_uri)); - //add self - parents.add(subprop_uri); - for (URI u : parents) { - fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBPROPERTYOF, subprop_uri)); - } - - StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), typeVar, sp.getObjectVar(), cntxtVar); - InferJoin join = new InferJoin(fsp, rdfType); - join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE); - node.replaceWith(join); - } - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java deleted file mode 100644 index 63c073b98..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java +++ /dev/null @@ -1,78 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
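The rewrite emitted by SubPropertyOfVisitor (and, with rdfs:subClassOf, by SubClassOfVisitor) always has the same shape: a FixedStatementPattern carrying the precomputed hierarchy rows, joined to the original pattern wrapped in DoNotExpandSP so no later pass expands it again. A hedged sketch of constructing that shape with the classes above; the method and variable names are illustrative:

// assumes this sketch lives in mvm.rya.rdftriplestore.inference
import mvm.rya.api.utils.NullableStatementImpl;
import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
import org.openrdf.model.URI;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.Var;

import java.util.UUID;

class SubPropertyRewriteSketch {
    static InferJoin rewrite(Var subjVar, Var predVar, Var objVar, Var cntxtVar, URI subProp) {
        String s = UUID.randomUUID().toString();
        Var hierVar = new Var(s);
        // left side: constant rows asserting each property's place in the hierarchy
        FixedStatementPattern fsp = new FixedStatementPattern(
                hierVar, new Var("c-" + s, RDFS.SUBPROPERTYOF), predVar, cntxtVar);
        fsp.statements.add(new NullableStatementImpl(subProp, RDFS.SUBPROPERTYOF, subProp));
        // right side: the original triple pattern, guarded against re-expansion
        StatementPattern guarded = new DoNotExpandSP(subjVar, hierVar, objVar, cntxtVar);
        InferJoin join = new InferJoin(fsp, guarded);
        join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
        return join;
    }
}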
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Union; -import org.openrdf.query.algebra.Var; - -/** - * All predicates are changed - * Class SubPropertyOfVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class SymmetricPropertyVisitor extends AbstractInferVisitor { - - public SymmetricPropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferSymmetricProperty(); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - - final Var predVar = sp.getPredicateVar(); - URI pred = (URI) predVar.getValue(); - String predNamespace = pred.getNamespace(); - - final Var objVar = sp.getObjectVar(); - final Var cntxtVar = sp.getContextVar(); - if (objVar != null && - !RDF.NAMESPACE.equals(predNamespace) && - !SESAME.NAMESPACE.equals(predNamespace) && - !RDFS.NAMESPACE.equals(predNamespace) - && !EXPANDED.equals(cntxtVar)) { - /** - * - * { ?a ?pred ?b .}\n" + - " UNION " + - " { ?b ?pred ?a } - */ - - URI symmPropUri = (URI) predVar.getValue(); - if(inferenceEngine.isSymmetricProperty(symmPropUri)) { - Var subjVar = sp.getSubjectVar(); - Union union = new InferUnion(); - union.setLeftArg(sp); - union.setRightArg(new StatementPattern(objVar, predVar, subjVar, cntxtVar)); - node.replaceWith(union); - } - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java deleted file mode 100644 index 2f795fb8b..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java +++ /dev/null @@ -1,69 +0,0 @@ -package mvm.rya.rdftriplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.rdftriplestore.utils.TransitivePropertySP; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.model.vocabulary.SESAME; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -/** - * Class TransitivePropertyVisitor - * Date: Mar 29, 2011 - * Time: 11:28:34 AM - */ -public class TransitivePropertyVisitor extends AbstractInferVisitor { - - public TransitivePropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) { - super(conf, inferenceEngine); - include = conf.isInferTransitiveProperty(); - } - - @Override - protected void meetSP(StatementPattern node) throws Exception { - StatementPattern sp = node.clone(); - final Var predVar = sp.getPredicateVar(); - - URI pred = (URI) predVar.getValue(); - String predNamespace = pred.getNamespace(); - - final Var objVar = sp.getObjectVar(); - final Var cntxtVar = sp.getContextVar(); - if (objVar != null && - !RDF.NAMESPACE.equals(predNamespace) && - !SESAME.NAMESPACE.equals(predNamespace) && - !RDFS.NAMESPACE.equals(predNamespace) - && !EXPANDED.equals(cntxtVar)) { - - URI transPropUri = (URI) predVar.getValue(); - if (inferenceEngine.isTransitiveProperty(transPropUri)) { - node.replaceWith(new TransitivePropertySP(sp.getSubjectVar(), sp.getPredicateVar(), sp.getObjectVar(), sp.getContextVar())); - } - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java b/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java deleted file mode 100644 index dd8e4fab3..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java +++ /dev/null @@ -1,167 +0,0 @@ -package mvm.rya.rdftriplestore.namespace; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
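chainTransitiveProperty in InferenceEngine walks a transitive predicate outward one hop at a time, with goUp choosing the direction when only one end of the pattern is bound. The recursion in miniature over a toy fact table; the data and names are illustrative:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TransitiveChainSketch {
    public static void main(String[] args) {
        // direct facts for a transitive property such as locatedIn
        Map<String, String> locatedIn = new HashMap<String, String>();
        locatedIn.put("urn:room", "urn:building");
        locatedIn.put("urn:building", "urn:campus");
        locatedIn.put("urn:campus", "urn:city");

        // subject bound, object free: chain forward, like goUp == false
        List<String> inferred = new ArrayList<String>();
        for (String cur = locatedIn.get("urn:room"); cur != null; cur = locatedIn.get(cur)) {
            inferred.add(cur); // each hop yields an inferred (urn:room, locatedIn, cur)
        }
        System.out.println(inferred); // urn:building, urn:campus, urn:city
    }
}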
- */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.persist.RdfDAOException; -import mvm.rya.api.persist.RyaDAO; -import mvm.rya.api.persist.RyaNamespaceManager; -import net.sf.ehcache.Cache; -import net.sf.ehcache.CacheManager; -import net.sf.ehcache.Element; -import net.sf.ehcache.Statistics; -import org.openrdf.model.Namespace; -import org.openrdf.sail.SailException; - -import java.io.InputStream; - -/** - * Class NamespaceManager - * Date: Oct 17, 2011 - * Time: 8:25:33 AM - */ -public class NamespaceManager { - CacheManager cacheManager; - Cache namespaceCache; - public static final String NAMESPACE_CACHE_NAME = "namespace"; - private RdfCloudTripleStoreConfiguration conf; - private RyaNamespaceManager namespaceManager; - - public NamespaceManager(RyaDAO ryaDAO, RdfCloudTripleStoreConfiguration conf) { - this.conf = conf; - initialize(ryaDAO); - } - - protected void initialize(RyaDAO ryaDAO) { - try { - this.namespaceManager = ryaDAO.getNamespaceManager(); - - InputStream cacheConfigStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("ehcache.xml"); - if (cacheConfigStream == null) { - this.cacheManager = CacheManager.create(); -// throw new RuntimeException("Cache Configuration does not exist"); - } else { - this.cacheManager = CacheManager.create(cacheConfigStream); - } - this.namespaceCache = cacheManager.getCache(NAMESPACE_CACHE_NAME); - if (namespaceCache == null) { - cacheManager.addCache(NAMESPACE_CACHE_NAME); - } - - - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public void shutdown() { - if (cacheManager != null) { - cacheManager.shutdown(); - cacheManager = null; - } - } - - public void addNamespace(String pfx, String namespace) { - try { - String savedNamespace = getNamespace(pfx); - //if the saved ns is the same one being saved, don't do anything - if (savedNamespace != null && savedNamespace.equals(namespace)) { - return; - } - - namespaceCache.put(new Element(pfx, namespace)); - namespaceManager.addNamespace(pfx, namespace); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public String getNamespace(String pfx) { - //try in the cache first - Element element = namespaceCache.get(pfx); - if (element != null) { - return (String) element.getValue(); - } - - try { - String namespace = namespaceManager.getNamespace(pfx); - if (namespace != null) { - namespaceCache.put(new Element(pfx, namespace)); - return namespace; - } - } catch (Exception e) { - //TODO: print or log? 
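NamespaceManager is a small read-through cache in front of the persistent RyaNamespaceManager: addNamespace and getNamespace consult Ehcache first and fall back to the store. The Ehcache calls it depends on, shown in isolation; the cache name and entry are illustrative:

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;

public class NamespaceCacheSketch {
    public static void main(String[] args) {
        CacheManager manager = CacheManager.create(); // default config, like the fallback above
        manager.addCache("namespace");
        Cache cache = manager.getCache("namespace");

        cache.put(new Element("foaf", "http://xmlns.com/foaf/0.1/"));

        Element hit = cache.get("foaf");
        if (hit != null) {
            System.out.println(hit.getValue()); // served from cache, no store round trip
        }
        manager.shutdown();
    }
}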
- } - return null; - - } - - public void removeNamespace(String pfx) { - try { - namespaceCache.remove(pfx); - namespaceManager.removeNamespace(pfx); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public CloseableIteration iterateNamespace() { - try { - //for this one we will go directly to the store - final CloseableIteration iteration = namespaceManager.iterateNamespace(); - return new CloseableIteration() { - @Override - public void close() throws SailException { - iteration.close(); - } - - @Override - public boolean hasNext() throws SailException { - return iteration.hasNext(); - } - - @Override - public Namespace next() throws SailException { - return iteration.next(); - } - - @Override - public void remove() throws SailException { - iteration.remove(); - } - }; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public void printStatistics() { - Statistics statistics = namespaceCache.getStatistics(); - if (statistics != null) { //TODO: use a logger please - System.out.println("Namespace Cache Statistics: "); - System.out.println("--Hits: \t" + statistics.getCacheHits()); - System.out.println("--Misses: \t" + statistics.getCacheMisses()); - System.out.println("--Total Count: \t" + statistics.getObjectCount()); - } - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java deleted file mode 100644 index b7f7623e5..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java +++ /dev/null @@ -1,165 +0,0 @@ -package mvm.rya.rdftriplestore.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.OpenRDFUtil; -import org.openrdf.model.*; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.helpers.RDFHandlerBase; - -import java.util.HashMap; -import java.util.Map; - -/** - * Class CombineContextsRdfInserter - * User: RoshanP - * Date: 3/23/12 - * Time: 9:50 AM
- */ -public class CombineContextsRdfInserter extends RDFHandlerBase { - - private final RepositoryConnection con; - private Resource[] contexts = new Resource[0]; - private boolean preserveBNodeIDs; - private final Map namespaceMap; - private final Map bNodesMap; - - public CombineContextsRdfInserter(RepositoryConnection con) { - this.con = con; - preserveBNodeIDs = true; - namespaceMap = new HashMap(); - bNodesMap = new HashMap(); - } - - public void setPreserveBNodeIDs(boolean preserveBNodeIDs) { - this.preserveBNodeIDs = preserveBNodeIDs; - } - - public boolean preservesBNodeIDs() { - return preserveBNodeIDs; - } - - public void enforceContext(Resource... contexts) { - OpenRDFUtil.verifyContextNotNull(contexts); - this.contexts = contexts; - } - - public boolean enforcesContext() { - return contexts.length != 0; - } - - public Resource[] getContexts() { - return contexts; - } - - @Override - public void endRDF() - throws RDFHandlerException { - for (Map.Entry entry : namespaceMap.entrySet()) { - String prefix = entry.getKey(); - String name = entry.getValue(); - - try { - if (con.getNamespace(prefix) == null) { - con.setNamespace(prefix, name); - } - } catch (RepositoryException e) { - throw new RDFHandlerException(e); - } - } - - namespaceMap.clear(); - bNodesMap.clear(); - } - - @Override - public void handleNamespace(String prefix, String name) { - // FIXME: set namespaces directly when they are properly handled wrt - // rollback - // don't replace earlier declarations - if (prefix != null && !namespaceMap.containsKey(prefix)) { - namespaceMap.put(prefix, name); - } - } - - @Override - public void handleStatement(Statement st) - throws RDFHandlerException { - Resource subj = st.getSubject(); - URI pred = st.getPredicate(); - Value obj = st.getObject(); - Resource ctxt = st.getContext(); - - if (!preserveBNodeIDs) { - if (subj instanceof BNode) { - subj = mapBNode((BNode) subj); - } - - if (obj instanceof BNode) { - obj = mapBNode((BNode) obj); - } - - if (!enforcesContext() && ctxt instanceof BNode) { - ctxt = mapBNode((BNode) ctxt); - } - } - - try { - if (enforcesContext()) { - Resource[] ctxts = contexts; - if (ctxt != null) { - ctxts = combineContexts(contexts, ctxt); - } - con.add(subj, pred, obj, ctxts); - } else { - con.add(subj, pred, obj, ctxt); - } - } catch (RepositoryException e) { - throw new RDFHandlerException(e); - } - } - - private BNode mapBNode(BNode bNode) { - BNode result = bNodesMap.get(bNode.getID()); - - if (result == null) { - result = con.getRepository().getValueFactory().createBNode(); - bNodesMap.put(bNode.getID(), result); - } - - return result; - } - - public static Resource[] combineContexts(Resource[] contexts, Resource ctxt) { - if (contexts == null || ctxt == null) { - throw new IllegalArgumentException("Contexts cannot be null"); - } - int length = contexts.length; - Resource[] ret = new Resource[length + 1]; - System.arraycopy(contexts, 0, ret, 0, length); - ret[length] = ctxt; - return ret; - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java deleted file mode 100644 index d86140b9f..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java +++ /dev/null @@ -1,58 +0,0 @@ -package mvm.rya.rdftriplestore.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics; - -/** - * Class DefaultStatistics - * Date: Apr 12, 2011 - * Time: 1:31:05 PM - */ -public class DefaultStatistics extends EvaluationStatistics { - - public DefaultStatistics() { - } - - @Override - protected CardinalityCalculator createCardinalityCalculator() { - return new DefaultCardinalityCalculator(); - } - - public class DefaultCardinalityCalculator extends CardinalityCalculator { - - double count = 0.0; - - @Override - protected double getCardinality(StatementPattern sp) { - //based on how many (subj, pred, obj) are set -// int numSet = 3; -// if (sp.getSubjectVar().hasValue()) numSet--; -// if (sp.getPredicateVar().hasValue()) numSet--; -// if (sp.getObjectVar().hasValue()) numSet--; -// return numSet; - return count++; - } - } - -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java deleted file mode 100644 index 891e12231..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java +++ /dev/null @@ -1,59 +0,0 @@ -package mvm.rya.rdftriplestore.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
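The FixedStatementPattern removed below is a StatementPattern that carries its own collection of statements, so an evaluation strategy can answer it from that collection instead of scanning the store; the subClassOf* expansions exercised in ArbitraryLengthQueryTest above are the kind of consumer this enables. A minimal sketch of the idea, using simplified stand-in types rather than the openrdf API:

    import java.util.List;

    public class FixedPatternSketch {

        /** Stand-in for an openrdf Statement; the real pattern holds Statement objects. */
        record Triple(String subj, String pred, String obj) {}

        /** A pattern that is its own result set. */
        static class FixedPattern {
            final List<Triple> statements;
            FixedPattern(List<Triple> statements) { this.statements = statements; }
        }

        /** Evaluation needs no store lookup: stream the fixed statements. */
        static Iterable<Triple> evaluate(FixedPattern p) {
            return p.statements;
        }

        public static void main(String[] args) {
            // e.g. a precomputed rdfs:subClassOf closure spliced into a query plan
            FixedPattern closure = new FixedPattern(List.of(
                    new Triple(":Model1Class", "rdfs:subClassOf", ":Model1ClassA"),
                    new Triple(":Model1ClassA", "rdfs:subClassOf", ":ModelClassD")));
            evaluate(closure).forEach(t ->
                    System.out.println(t.subj() + " " + t.pred() + " " + t.obj()));
        }
    }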
- */ - - - -import org.openrdf.model.Statement; -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -import java.util.ArrayList; -import java.util.Collection; - -/** - * StatementPattern gives fixed statements back - * - * Class FixedStatementPattern - * Date: Mar 12, 2012 - * Time: 2:42:06 PM - */ -public class FixedStatementPattern extends StatementPattern { - public Collection statements = new ArrayList(); - - public FixedStatementPattern() { - } - - public FixedStatementPattern(Var subject, Var predicate, Var object) { - super(subject, predicate, object); - } - - public FixedStatementPattern(Scope scope, Var subject, Var predicate, Var object) { - super(scope, subject, predicate, object); - } - - public FixedStatementPattern(Var subject, Var predicate, Var object, Var context) { - super(subject, predicate, object, context); - } - - public FixedStatementPattern(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) { - super(scope, subjVar, predVar, objVar, conVar); - } -} diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java deleted file mode 100644 index 4f2e378b9..000000000 --- a/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java +++ /dev/null @@ -1,52 +0,0 @@ -package mvm.rya.rdftriplestore.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
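TransitivePropertySP, deleted next, only marks a statement pattern whose predicate should be treated as transitive; the interesting work happens at evaluation time, which has to chase the predicate to a fixed point. A rough sketch of that expansion, over an in-memory adjacency map rather than the DAO the real evaluation uses:

    import java.util.*;

    public class TransitiveClosureSketch {

        /** All nodes reachable from start by following pred edges (search to a fixed point). */
        static Set<String> reachable(Map<String, List<String>> edges, String start) {
            Set<String> seen = new LinkedHashSet<>();
            Deque<String> todo = new ArrayDeque<>(List.of(start));
            while (!todo.isEmpty()) {
                for (String next : edges.getOrDefault(todo.pop(), List.of())) {
                    if (seen.add(next)) {
                        todo.push(next);  // newly discovered node: expand it as well
                    }
                }
            }
            return seen;
        }

        public static void main(String[] args) {
            // :a p :b and :b p :c, so a transitive p yields :a p {:b, :c}
            Map<String, List<String>> edges = Map.of(
                    ":a", List.of(":b"),
                    ":b", List.of(":c"));
            System.out.println(reachable(edges, ":a"));  // [:b, :c]
        }
    }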
- */ - - - -import org.openrdf.query.algebra.StatementPattern; -import org.openrdf.query.algebra.Var; - -/** - * Class TransitivePropertySP - * Date: Mar 14, 2012 - * Time: 5:23:10 PM - */ -public class TransitivePropertySP extends StatementPattern { - - public TransitivePropertySP() { - } - - public TransitivePropertySP(Var subject, Var predicate, Var object) { - super(subject, predicate, object); - } - - public TransitivePropertySP(Scope scope, Var subject, Var predicate, Var object) { - super(scope, subject, predicate, object); - } - - public TransitivePropertySP(Var subject, Var predicate, Var object, Var context) { - super(subject, predicate, object, context); - } - - public TransitivePropertySP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) { - super(scope, subjVar, predVar, objVar, conVar); - } -} diff --git a/sail/src/main/resources/META-INF/org.openrdf.store.schemas b/sail/src/main/resources/META-INF/org.openrdf.store.schemas deleted file mode 100644 index ad9993f38..000000000 --- a/sail/src/main/resources/META-INF/org.openrdf.store.schemas +++ /dev/null @@ -1 +0,0 @@ -META-INF/schemas/cloudbasestore-schema.ttl \ No newline at end of file diff --git a/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl b/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl deleted file mode 100644 index 708a9648e..000000000 --- a/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl +++ /dev/null @@ -1,20 +0,0 @@ -@prefix rdfs: . -@prefix rep: . -@prefix sr: . -@prefix sail: . -@prefix cbs: . - -[] a rep:Repository ; - rep:repositoryID "{%Repository ID|cloudbasestore%}" ; - rdfs:label "{%Repository title|Cloudbase store%}" ; - rep:repositoryImpl [ - rep:repositoryType "openrdf:SailRepository" ; - sr:sailImpl [ - sail:sailType "openrdf:RdfCloudTripleStore" ; - cbs:server "{%CBSail server|stratus13%}" ; - cbs:port "{%CBSail port|2181%}" ; - cbs:instance "{%CBSail instance|stratus%}" ; - cbs:user "{%CBSail user|root%}" ; - cbs:password "{%CBSail password|password%}" ; - ] - ]. \ No newline at end of file diff --git a/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory b/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory deleted file mode 100644 index 09a06617e..000000000 --- a/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory +++ /dev/null @@ -1 +0,0 @@ -mvm.rya.rdftriplestore.RdfCloudTripleStoreFactory \ No newline at end of file diff --git a/sail/src/main/resources/ehcache.xml b/sail/src/main/resources/ehcache.xml deleted file mode 100644 index 7049c001b..000000000 --- a/sail/src/main/resources/ehcache.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - diff --git a/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java b/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java deleted file mode 100644 index 4a5d871d6..000000000 --- a/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java +++ /dev/null @@ -1,500 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.openrdf.model.Resource; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.resultio.text.tsv.SPARQLResultsTSVWriter; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFParseException; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import mvm.rya.rdftriplestore.RyaSailRepository; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.namespace.NamespaceManager; -import junit.framework.TestCase; - -/** - * The purpose of this is to provide a test case that illustrates a failure that is being encountered. A working test is - * provided as well to demonstrate that a successful query can be made. - */ -public class ArbitraryLengthQueryTest extends TestCase { - - /** - * The repository used for the tests. - */ - private Repository repository; - - @Override - public void setUp() throws Exception { - super.setUp(); - - final RdfCloudTripleStore store = new MockRdfCloudStore(); - - final NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf()); - store.setNamespaceManager(nm); - - repository = new RyaSailRepository(store); - repository.initialize(); - - load(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - repository.shutDown(); - } - - /** - * This test works. The expected result is 6 rows ranging from "Model1Class 1" through "Model1Class 6". - * - * @throws RepositoryException - * @throws QueryEvaluationException - * @throws TupleQueryResultHandlerException - * - * @throws MalformedQueryException - */ - public void testWithoutSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException { - final String query = "SELECT ?i ?i_label ?i_class ?i_v1" - + "WHERE {" - + "?i ?i_label ." - + "?i a ?i_class ." - + "?i_class * ." - + "OPTIONAL { ?i ?i_v1 } ." - + "}" - + "ORDER BY ?i_label"; - - final RepositoryConnection conn = repository.getConnection(); - final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler(); - tupleQuery.evaluate(countTupleHandler); - assertEquals(6, countTupleHandler.getCount()); - conn.close(); - } - - /** - * This test fails. 
The expected result is 6 rows ranging from "Model1Class 1 Event" to "Model1Class 6 Event". The - * current result is a RejectedExecutionException. - * - * @throws RepositoryException - * @throws QueryEvaluationException - * @throws TupleQueryResultHandlerException - * - * @throws MalformedQueryException - */ - public void testWithSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException { - final String query = "SELECT ?i ?i_label ?i_class ?i_v1 ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1" - + "WHERE {" - + "?i ?i_label ." - + "?i a ?i_class ." - + "?i_class * ." - + "OPTIONAL { ?i ?i_v1 } ." - + "?i ?i_v2 ." - + "{" - + "SELECT ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1" - + "WHERE {" - + "?i_v2 ?i_v2_label ." - + "?i_v2 a ?i_v2_class ." - + "?i_v2_class * ." - + "OPTIONAL { ?i_v2 ?i_v2_v1 } ." - + "}" - + "ORDER BY ?i_v2_label" - + "}" - + "}" - + "ORDER BY ?i_label"; - - final RepositoryConnection conn = repository.getConnection(); - final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler(); - tupleQuery.evaluate(countTupleHandler); - assertEquals(6, countTupleHandler.getCount()); - conn.close(); - } - - /** - * Load the t-box and a-box turtle from strings defined within this class. - * - * @throws RepositoryException - * @throws RDFParseException - * @throws IOException - */ - private void load() throws RepositoryException, RDFParseException, IOException { - final RepositoryConnection conn = repository.getConnection(); - - // T-Box - String ttlString = MODEL_TTL; - InputStream stringInput = new ByteArrayInputStream(ttlString.getBytes()); - conn.add(stringInput, "http://dragon-research.com/cham/model/model1", RDFFormat.TURTLE, new Resource[]{}); - - // A-Box - ttlString = BUCKET_TTL; - stringInput = new ByteArrayInputStream(ttlString.getBytes()); - conn.add(stringInput, "http://dragon-research.com/cham/bucket/bucket1", RDFFormat.TURTLE, new Resource[]{}); - - conn.commit(); - conn.close(); - } - - /** - * Mock RDF cloud store for one shot testing. - */ - public class MockRdfCloudStore extends RdfCloudTripleStore { - public MockRdfCloudStore() { - super(); - final Instance instance = new MockInstance(); - try { - final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - setConf(conf); - - final Connector connector = instance.getConnector("", ""); - final AccumuloRyaDAO cdao = new AccumuloRyaDAO(); - cdao.setConf(conf); - cdao.setConnector(connector); - setRyaDAO(cdao); - inferenceEngine = new InferenceEngine(); - inferenceEngine.setRyaDAO(cdao); - inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec - inferenceEngine.setConf(conf); - setInferenceEngine(inferenceEngine); - } catch (final Exception e) { - e.printStackTrace(); - } - } - } - - /** - * The ontology t-box in turtle. - */ - private static String MODEL_TTL = "@prefix : ." - + "@prefix cham: ." - + "@prefix dc: ." - + "@prefix owl: ." - + "@prefix qudt: ." - + "@prefix rdf: ." - + "@prefix rdfs: ." - + "@prefix unit: ." - + "@prefix xml: ." - + "@prefix xsd: ." - + "" - + "" - + " rdf:type owl:Ontology ;" - + " rdfs:label \"Model1 Ontology\"^^xsd:string ;" - + " :versionInfo \"0.1\"^^xsd:string ;" - + " dc:title \"Model1 Ontology\"^^xsd:string ." 
- + "" - + ":ModelClassD" - + " rdf:type owl:Class ;" - + " rdfs:label \"ModelClassD\"^^xsd:string ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onDataRange xsd:string ;" - + " owl:onProperty :name" - + " ] ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:allValuesFrom :Model1ClassAssoc ;" - + " owl:onProperty :hasModel1ClassAssoc" - + " ] ." - + "" - + ":ModelClassC" - + " rdf:type owl:Class ;" - + " rdfs:label \"ModelClassC\"^^xsd:string ;" - + " rdfs:subClassOf :ModelClassD ." - + "" - + ":Modle1ClassB" - + " rdf:type owl:Class ;" - + " rdfs:label \"Modle1ClassB\"^^xsd:string ;" - + " rdfs:subClassOf :ModelClassC ." - + "" - + ":Model1ClassA" - + " rdf:type owl:Class ;" - + " rdfs:label \"Model1ClassA\"^^xsd:string ;" - + " rdfs:subClassOf :Modle1ClassB ." - + "" - + ":Model1Class" - + " rdf:type owl:Class ;" - + " rdfs:label \"Model1Class\"^^xsd:string ;" - + " rdfs:subClassOf :Model1ClassA ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onDataRange xsd:string ;" - + " owl:onProperty :model1ClassId" - + " ] ." - + "" - + ":Model1Event" - + " rdf:type owl:Class ;" - + " rdfs:label \"Model1Event\"^^xsd:string ;" - + " rdfs:subClassOf :Event ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:allValuesFrom :Model1ClassA ;" - + " owl:onProperty :hasModel1ClassA" - + " ] ." - + "" - + ":Model1ClassAssoc" - + " rdf:type owl:Class ;" - + " rdfs:label \"Model1ClassAssoc\"^^xsd:string ;" - + " rdfs:subClassOf owl:Thing ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onDataRange xsd:string ;" - + " owl:onProperty :name" - + " ] ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onClass :ModelClassD ;" - + " owl:onProperty :hasEntity" - + " ] ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:allValuesFrom :ModelClassD ;" - + " owl:onProperty :hasEntity" - + " ] ." - + "" - + ":TemporalEntity" - + " rdf:type owl:Class ;" - + " rdfs:label \"TemporalEntity\"^^xsd:string ;" - + " rdfs:subClassOf owl:Thing ." - + "" - + ":TemporalInstant" - + " rdf:type owl:Class ;" - + " rdfs:label \"TemporalInstant\"^^xsd:string ;" - + " rdfs:subClassOf :TemporalEntity ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onDataRange xsd:dateTime ;" - + " owl:onProperty :dateTime" - + " ] ." - + "" - + ":model1ClassId" - + " rdf:type owl:DatatypeProperty ;" - + " rdfs:domain :Model1Class ;" - + " rdfs:label \"model1ClassId\"^^xsd:string ;" - + " rdfs:range xsd:string ." - + "" - + ":hasModel1ClassAssoc" - + " rdf:type owl:ObjectProperty ;" - + " rdfs:domain :ModelClassD ;" - + " rdfs:label \"hasModel1ClassAssoc\"^^xsd:string ;" - + " rdfs:range :Model1ClassAssoc ." - + "" - + ":name" - + " rdf:type owl:DatatypeProperty ;" - + " rdfs:domain :Model1ClassAssoc , :ModelClassD ;" - + " rdfs:label \"name\"^^xsd:string ;" - + " rdfs:range xsd:string ." 
- + "" - + ":hasTemporalEntity" - + " rdf:type owl:ObjectProperty ;" - + " rdfs:domain :ThreatAnalysis , :Event , :TrackingData , :Threat , :Vulnerability ;" - + " rdfs:label \"hasTemporalEntity\"^^xsd:string ;" - + " rdfs:range :TemporalEntity ." - + "" - + ":hasEntity" - + " rdf:type owl:ObjectProperty ;" - + " rdfs:domain :Model1ClassAssoc ;" - + " rdfs:label \"hasEntity\"^^xsd:string ;" - + " rdfs:range :ModelClassD ." - + "" - + ":dateTime" - + " rdf:type owl:DatatypeProperty ;" - + " rdfs:domain :TemporalInstant ;" - + " rdfs:label \"dateTime\"^^xsd:string ;" - + " rdfs:range xsd:dateTime ." - + "" - + ":Event" - + " rdf:type owl:Class ;" - + " rdfs:label \"Event\"^^xsd:string ;" - + " rdfs:subClassOf :ModelClassD ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:allValuesFrom :TemporalEntity ;" - + " owl:onProperty :hasTemporalEntity" - + " ] ;" - + " rdfs:subClassOf" - + " [ rdf:type owl:Restriction ;" - + " owl:maxQualifiedCardinality" - + " \"1\"^^xsd:nonNegativeInteger ;" - + " owl:onClass :TemporalEntity ;" - + " owl:onProperty :hasTemporalEntity" - + " ] ." - + "" - + ":hasModel1ClassA" - + " rdf:type owl:ObjectProperty ;" - + " rdfs:domain :Model1Event ;" - + " rdfs:label \"hasModel1ClassA\"^^xsd:string ;" - + " rdfs:range :Model1ClassA ." - + "" - + "rdfs:label" - + " rdf:type owl:AnnotationProperty ." - + "" - + "xsd:date" - + " rdf:type rdfs:Datatype ." - + "" - + "xsd:time" - + " rdf:type rdfs:Datatype ."; - - /** - * The ontology a-box in turtle. - */ - private static String BUCKET_TTL = "@prefix : ." - + "@prefix rdfs: ." - + "@prefix owl: ." - + "@prefix xsd: ." - + "@prefix rdf: ." - + "@prefix model1: ." - + "" - + ":i1 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 1\"^^xsd:string ;" - + " model1:name \"Model1Class 1\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i1-assoc ;" - + " model1:model1ClassId \"ID01\"^^xsd:string ." - + " " - + ":i1-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 1 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i1-event ." - + " " - + ":i1-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 1 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i1-time ." - + "" - + ":i1-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 1 Time\"^^xsd:string ;" - + " model1:dateTime \"1994-02-07T21:47:01.000Z\"^^xsd:dateTime ." - + " " - + ":i2 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 2\"^^xsd:string ;" - + " model1:name \"Model1Class 2\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i2-assoc ;" - + " model1:model1ClassId \"ID02\"^^xsd:string ." - + "" - + ":i2-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 2 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i2-event ." - + " " - + ":i2-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 2 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i2-time ." - + "" - + ":i2-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 2 Time\"^^xsd:string ;" - + " model1:dateTime \"1995-11-06T05:15:01.000Z\"^^xsd:dateTime ." - + " " - + ":i3 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 3\"^^xsd:string ;" - + " model1:name \"Model1Class 3\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i3-assoc ;" - + " model1:model1ClassId \"ID03\"^^xsd:string ." - + "" - + ":i3-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 3 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i3-event ." 
- + " " - + ":i3-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 3 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i3-time ." - + "" - + ":i3-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 3 Time\"^^xsd:string ;" - + " model1:dateTime \"1999-04-30T16:30:00.000Z\"^^xsd:dateTime ." - + " " - + ":i4 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 4\"^^xsd:string ;" - + " model1:name \"Model1Class 4\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i4-assoc ;" - + " model1:model1ClassId \"ID04\"^^xsd:string ." - + "" - + ":i4-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 4 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i4-event ." - + " " - + ":i4-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 4 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i4-time ." - + "" - + ":i4-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 4 Time\"^^xsd:string ;" - + " model1:dateTime \"2001-02-27T21:20:00.000Z\"^^xsd:dateTime ." - + " " - + ":i5 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 5\"^^xsd:string ;" - + " model1:name \"Model1Class 5\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i5-assoc ;" - + " model1:model1ClassId \"ID05\"^^xsd:string ." - + "" - + ":i5-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 5 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i5-event ." - + " " - + ":i5-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 5 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i5-time ." - + "" - + ":i5-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 5 Time\"^^xsd:string ;" - + " model1:dateTime \"2002-01-16T00:30:00.000Z\"^^xsd:dateTime ." - + " " - + ":i6 a model1:Model1Class ;" - + " rdfs:label \"Model1Class 6\"^^xsd:string ;" - + " model1:name \"Model1Class 6\"^^xsd:string ;" - + " model1:hasModel1ClassAssoc :i6-assoc ;" - + " model1:model1ClassId \"ID06\"^^xsd:string ." - + "" - + ":i6-assoc a model1:Model1ClassAssoc ;" - + " rdfs:label \"Model1Class 6 Assoc\"^^xsd:string ;" - + " model1:hasEntity :i6-event ." - + " " - + ":i6-event a model1:Model1Event ;" - + " rdfs:label \"Model1Class 6 Event\"^^xsd:string ;" - + " model1:hasTemporalEntity :i6-time ." - + "" - + ":i6-time a model1:TemporalInstant ;" - + " rdfs:label \"Model1Class 6 Time\"^^xsd:string ;" - + " model1:dateTime \"2003-04-08T13:43:00.000Z\"^^xsd:dateTime ."; -} diff --git a/sail/src/test/java/mvm/rya/HashJoinTest.java b/sail/src/test/java/mvm/rya/HashJoinTest.java deleted file mode 100644 index bbcdbcd8f..000000000 --- a/sail/src/test/java/mvm/rya/HashJoinTest.java +++ /dev/null @@ -1,374 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import info.aduna.iteration.CloseableIteration; -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.join.HashJoin; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertFalse; -import static junit.framework.Assert.assertTrue; - -/** - * Date: 7/24/12 - * Time: 5:51 PM - */ -public class HashJoinTest { - private AccumuloRyaDAO dao; - static String litdupsNS = "urn:test:litdups#"; - private Connector connector; - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - @Before - public void init() throws Exception { - dao = new AccumuloRyaDAO(); - connector = new MockInstance().getConnector("", ""); - dao.setConnector(connector); - dao.setConf(conf); - dao.init(); - } - - @After - public void destroy() throws Exception { - dao.destroy(); - } - - @Test - public void testSimpleJoin() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - - //1 join - HashJoin hjoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = hjoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two)); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testSimpleJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); 
- dao.add(new RyaStatement(subj3, pred, three)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - - //1 join - HashJoin hjoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = hjoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testMergeJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - - //1 join - HashJoin hjoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = hjoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testMergeJoinMultiWayNone() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new 
RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - - - //1 join - HashJoin hjoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = hjoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } - - @Test - public void testMergeJoinMultiWayNone2() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - - //1 join - HashJoin hjoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = hjoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } - - @Test - public void testSimpleHashJoinPredicateOnly() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred2, one)); - - - //1 join - HashJoin ijoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = ijoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = join.next(); - count++; - } - assertEquals(4, count); - join.close(); - } - - @Test - public void testSimpleMergeJoinPredicateOnly2() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, 
pred1, two)); - dao.add(new RyaStatement(subj1, pred1, three)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj1, pred2, two)); - dao.add(new RyaStatement(subj1, pred2, three)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred1, two)); - dao.add(new RyaStatement(subj2, pred1, three)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj2, pred2, two)); - dao.add(new RyaStatement(subj2, pred2, three)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred1, two)); - dao.add(new RyaStatement(subj3, pred1, three)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj3, pred2, two)); - dao.add(new RyaStatement(subj3, pred2, three)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred1, two)); - dao.add(new RyaStatement(subj4, pred1, three)); - dao.add(new RyaStatement(subj4, pred2, one)); - dao.add(new RyaStatement(subj4, pred2, two)); - dao.add(new RyaStatement(subj4, pred2, three)); - - - //1 join - HashJoin ijoin = new HashJoin(dao.getQueryEngine()); - CloseableIteration join = ijoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = join.next(); - count++; - } - assertEquals(12, count); - join.close(); - } -} diff --git a/sail/src/test/java/mvm/rya/IterativeJoinTest.java b/sail/src/test/java/mvm/rya/IterativeJoinTest.java deleted file mode 100644 index 610b8eb35..000000000 --- a/sail/src/test/java/mvm/rya/IterativeJoinTest.java +++ /dev/null @@ -1,365 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
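IterativeJoinTest below asserts the same subject-intersection semantics for IterativeJoin. The deleted code does not show that strategy's internals here, but the name suggests an index-nested-loop style: take the candidates from the first pair, then verify each against the remaining pairs one lookup at a time. A sketch under that assumption, with containsStatement standing in for a hypothetical DAO point lookup:

    import java.util.*;
    import java.util.function.BiPredicate;

    public class IterativeJoinSketch {

        static List<String> join(List<String> candidates,
                                 List<String> remainingPairs,
                                 BiPredicate<String, String> containsStatement) {
            List<String> result = new ArrayList<>();
            for (String subj : candidates) {
                boolean matchesAll = true;
                for (String pair : remainingPairs) {
                    if (!containsStatement.test(subj, pair)) {
                        matchesAll = false;  // first miss: stop checking this subject
                        break;
                    }
                }
                if (matchesAll) {
                    result.add(subj);
                }
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, Set<String>> store = Map.of(
                    "subj1", Set.of("pred1=1", "pred1=2"),
                    "subj2", Set.of("pred1=1"));
            System.out.println(join(List.of("subj1", "subj2"), List.of("pred1=2"),
                    (s, p) -> store.getOrDefault(s, Set.of()).contains(p)));  // [subj1]
        }
    }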
- */ - - - -import info.aduna.iteration.CloseableIteration; -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.join.IterativeJoin; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static junit.framework.Assert.*; - -/** - * Date: 7/24/12 - * Time: 5:51 PM - */ -public class IterativeJoinTest { - private AccumuloRyaDAO dao; - static String litdupsNS = "urn:test:litdups#"; - private Connector connector; - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - @Before - public void init() throws Exception { - dao = new AccumuloRyaDAO(); - connector = new MockInstance().getConnector("", ""); - dao.setConnector(connector); - dao.setConf(conf); - dao.init(); - } - - @After - public void destroy() throws Exception { - dao.destroy(); - } - - @Test - public void testSimpleIterativeJoin() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - //1 join - IterativeJoin iterJoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = iterJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two)); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testSimpleIterativeJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, three)); - dao.add(new 
RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - //1 join - IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = iterativeJoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testIterativeJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - //1 join - IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = iterativeJoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testIterativeJoinMultiWayNone() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new 
RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - - //1 join - IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = iterativeJoin.join(null, - new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } - - @Test - public void testIterativeJoinMultiWayNone2() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - //1 join - IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = iterativeJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry(pred, one), - new RdfCloudTripleStoreUtils.CustomEntry(pred, two), - new RdfCloudTripleStoreUtils.CustomEntry(pred, three), - new RdfCloudTripleStoreUtils.CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } - - @Test - public void testSimpleIterativeJoinPredicateOnly() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred2, one)); - - - //1 join - IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = ijoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = join.next(); - count++; - } - assertEquals(4, count); - join.close(); - } - - @Test - public void testSimpleIterativeJoinPredicateOnly2() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new 
RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, pred1, two)); - dao.add(new RyaStatement(subj1, pred1, three)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj1, pred2, two)); - dao.add(new RyaStatement(subj1, pred2, three)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred1, two)); - dao.add(new RyaStatement(subj2, pred1, three)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj2, pred2, two)); - dao.add(new RyaStatement(subj2, pred2, three)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred1, two)); - dao.add(new RyaStatement(subj3, pred1, three)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj3, pred2, two)); - dao.add(new RyaStatement(subj3, pred2, three)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred1, two)); - dao.add(new RyaStatement(subj4, pred1, three)); - dao.add(new RyaStatement(subj4, pred2, one)); - dao.add(new RyaStatement(subj4, pred2, two)); - dao.add(new RyaStatement(subj4, pred2, three)); - - - //1 join - IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine()); - CloseableIteration join = ijoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = join.next(); - count++; - } - assertEquals(12, count); - join.close(); - } -} diff --git a/sail/src/test/java/mvm/rya/MergeJoinTest.java b/sail/src/test/java/mvm/rya/MergeJoinTest.java deleted file mode 100644 index e4f07c4f3..000000000 --- a/sail/src/test/java/mvm/rya/MergeJoinTest.java +++ /dev/null @@ -1,370 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
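MergeJoinTest, last in this group, covers MergeJoin with the same fixtures. A merge join assumes each input arrives sorted, which the sorted Accumulo index tables make natural; cursors then advance in lockstep and a subject is kept only when every side holds it. A two-way sketch over sorted lists (the deleted class generalizes this to N inputs):

    import java.util.*;

    public class MergeJoinSketch {

        static List<String> join(List<String> left, List<String> right) {
            List<String> out = new ArrayList<>();
            int i = 0, j = 0;
            while (i < left.size() && j < right.size()) {
                int cmp = left.get(i).compareTo(right.get(j));
                if (cmp == 0) {            // both sides hold the same subject
                    out.add(left.get(i));
                    i++; j++;
                } else if (cmp < 0) {      // advance whichever cursor is behind
                    i++;
                } else {
                    j++;
                }
            }
            return out;
        }

        public static void main(String[] args) {
            List<String> one = List.of("subj1", "subj2", "subj3", "subj4");
            List<String> two = List.of("subj1", "subj2", "subj4");
            System.out.println(join(one, two));  // [subj1, subj2, subj4]
        }
    }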
- */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.join.MergeJoin; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; - -import java.util.HashSet; -import java.util.Set; - -import static junit.framework.Assert.*; -import static mvm.rya.api.RdfCloudTripleStoreUtils.CustomEntry; - -/** - * TODO: Move to rya.api when we have proper mock ryaDao - * - * Date: 7/24/12 - * Time: 9:49 AM - */ -public class MergeJoinTest { - - private AccumuloRyaDAO dao; - static String litdupsNS = "urn:test:litdups#"; - private Connector connector; - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - @Before - public void init() throws Exception { - dao = new AccumuloRyaDAO(); - connector = new MockInstance().getConnector("", ""); - dao.setConnector(connector); - dao.setConf(conf); - dao.init(); - } - - @After - public void destroy() throws Exception { - dao.destroy(); - } - - @Test - public void testSimpleMergeJoin() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, new CustomEntry(pred, one), - new CustomEntry(pred, two)); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testSimpleMergeJoinPredicateOnly() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred2, one)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = 
join.next(); - count++; - } - assertEquals(4, count); - join.close(); - } - - @Test - public void testSimpleMergeJoinPredicateOnly2() throws Exception { - //add data - RyaURI pred1 = new RyaURI(litdupsNS, "pred1"); - RyaURI pred2 = new RyaURI(litdupsNS, "pred2"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred1, one)); - dao.add(new RyaStatement(subj1, pred1, two)); - dao.add(new RyaStatement(subj1, pred1, three)); - dao.add(new RyaStatement(subj1, pred2, one)); - dao.add(new RyaStatement(subj1, pred2, two)); - dao.add(new RyaStatement(subj1, pred2, three)); - dao.add(new RyaStatement(subj2, pred1, one)); - dao.add(new RyaStatement(subj2, pred1, two)); - dao.add(new RyaStatement(subj2, pred1, three)); - dao.add(new RyaStatement(subj2, pred2, one)); - dao.add(new RyaStatement(subj2, pred2, two)); - dao.add(new RyaStatement(subj2, pred2, three)); - dao.add(new RyaStatement(subj3, pred1, one)); - dao.add(new RyaStatement(subj3, pred1, two)); - dao.add(new RyaStatement(subj3, pred1, three)); - dao.add(new RyaStatement(subj3, pred2, one)); - dao.add(new RyaStatement(subj3, pred2, two)); - dao.add(new RyaStatement(subj3, pred2, three)); - dao.add(new RyaStatement(subj4, pred1, one)); - dao.add(new RyaStatement(subj4, pred1, two)); - dao.add(new RyaStatement(subj4, pred1, three)); - dao.add(new RyaStatement(subj4, pred2, one)); - dao.add(new RyaStatement(subj4, pred2, two)); - dao.add(new RyaStatement(subj4, pred2, three)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, pred1, pred2); - - int count = 0; - while (join.hasNext()) { - RyaStatement next = join.next(); - count++; - } - assertEquals(12, count); - join.close(); - } - - @Test - public void testSimpleMergeJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, three)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, new CustomEntry(pred, one), - new CustomEntry(pred, two), - new CustomEntry(pred, three), - new CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while 
(join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj3)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testMergeJoinMultiWay() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, two)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, two)); - dao.add(new RyaStatement(subj2, pred, three)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, one)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - dao.add(new RyaStatement(subj4, pred, four)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, new CustomEntry(pred, one), - new CustomEntry(pred, two), - new CustomEntry(pred, three), - new CustomEntry(pred, four) - ); - - Set uris = new HashSet(); - while (join.hasNext()) { - uris.add(join.next()); - } - assertTrue(uris.contains(subj1)); - assertTrue(uris.contains(subj2)); - assertTrue(uris.contains(subj4)); - join.close(); - } - - @Test - public void testMergeJoinMultiWayNone() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, three)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - dao.add(new RyaStatement(subj4, pred, three)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, new CustomEntry(pred, one), - new CustomEntry(pred, two), - new CustomEntry(pred, three), - new CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } - - @Test - public void testMergeJoinMultiWayNone2() throws Exception { - //add data - RyaURI pred = new RyaURI(litdupsNS, "pred1"); - RyaType zero = new RyaType("0"); - RyaType one = new RyaType("1"); - RyaType two = new RyaType("2"); - RyaType three = new RyaType("3"); - RyaType 
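-        // The multi-way variants in this file require a subject to carry every
-        // listed (pred, object) pair: testMergeJoinMultiWay omits "three" for
-        // subj3, so only subj1, subj2 and subj4 survive, and the two "None"
-        // tests leave no subject with all four values, so join.hasNext() is
-        // false immediately. Closing the iteration is still required in that
-        // case to release the underlying scanners, hence the join.close()
-        // after every assert.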
four = new RyaType("4"); - RyaURI subj1 = new RyaURI(litdupsNS, "subj1"); - RyaURI subj2 = new RyaURI(litdupsNS, "subj2"); - RyaURI subj3 = new RyaURI(litdupsNS, "subj3"); - RyaURI subj4 = new RyaURI(litdupsNS, "subj4"); - - dao.add(new RyaStatement(subj1, pred, one)); - dao.add(new RyaStatement(subj1, pred, four)); - dao.add(new RyaStatement(subj2, pred, zero)); - dao.add(new RyaStatement(subj2, pred, one)); - dao.add(new RyaStatement(subj2, pred, four)); - dao.add(new RyaStatement(subj3, pred, two)); - dao.add(new RyaStatement(subj3, pred, four)); - dao.add(new RyaStatement(subj4, pred, one)); - dao.add(new RyaStatement(subj4, pred, two)); - - - //1 join - MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine()); - CloseableIteration join = mergeJoin.join(null, new CustomEntry(pred, one), - new CustomEntry(pred, two), - new CustomEntry(pred, three), - new CustomEntry(pred, four) - ); - - assertFalse(join.hasNext()); - join.close(); - } -} diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java deleted file mode 100644 index 31efa3acf..000000000 --- a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java +++ /dev/null @@ -1,1363 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
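The deleted RdfCloudTripleStoreConnectionTest below exercises the SAIL layer end to end over a MockInstance; nearly every test builds a SPARQL string, evaluates it, and counts solutions with the CountTupleHandler defined near the bottom of the file. A minimal sketch of that recurring idiom, reusing the file's own conn and query variables ('expected' is a hypothetical placeholder):

    TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler handler = new CountTupleHandler();
    q.evaluate(handler);                        // handler increments per solution
    assertEquals(expected, handler.getCount()); // 'expected' varies per test case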
- */ - - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.NAMESPACE; - -import java.io.InputStream; -import java.util.List; - -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import mvm.rya.rdftriplestore.RyaSailRepository; -import mvm.rya.rdftriplestore.inference.InferenceEngine; -import mvm.rya.rdftriplestore.namespace.NamespaceManager; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.junit.Ignore; -import org.openrdf.model.Literal; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.Update; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryResult; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.rio.RDFFormat; - -/** - * Class RdfCloudTripleStoreConnectionTest - * Date: Mar 3, 2011 - * Time: 12:03:29 PM - */ -public class RdfCloudTripleStoreConnectionTest extends TestCase { - private Repository repository; - ValueFactoryImpl vf = new ValueFactoryImpl(); - private InferenceEngine internalInferenceEngine; - - static String litdupsNS = "urn:test:litdups#"; - URI cpu = vf.createURI(litdupsNS, "cpu"); - protected RdfCloudTripleStore store; - - public void setUp() throws Exception { - super.setUp(); - store = new MockRdfCloudStore(); -// store.setDisplayQueryPlan(true); -// store.setInferencing(false); - NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf()); - store.setNamespaceManager(nm); - repository = new RyaSailRepository(store); - repository.initialize(); - } - - public void tearDown() throws Exception { - super.tearDown(); - repository.shutDown(); - } - - public void testAddStatement() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - conn.add(cpu, loadPerc, uri1); - conn.commit(); - - RepositoryResult result = conn.getStatements(cpu, loadPerc, null, true); - int count = 0; - while (result.hasNext()) { - count++; - result.next(); - } - result.close(); - assertEquals(1, count); - - //clean up - conn.remove(cpu, loadPerc, uri1); - -// //test removal - result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]); - count = 0; - while (result.hasNext()) { - count++; - result.next(); - } - result.close(); - assertEquals(0, count); - - conn.close(); - } - -// public void testAddAuth() throws Exception { -// RepositoryConnection conn = repository.getConnection(); -// URI cpu = vf.createURI(litdupsNS, "cpu"); -// URI loadPerc = 
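-// The add/getStatements/remove round trip in testAddStatement above repeats
-// one counting loop; a hedged helper for it, assuming the
-// RepositoryResult<Statement> generic that the flattened diff dropped:
-//
-//     static int count(RepositoryResult<Statement> result) throws Exception {
-//         try {
-//             int n = 0;
-//             while (result.hasNext()) { result.next(); n++; }
-//             return n;
-//         } finally {
-//             result.close();
-//         }
-//     }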
vf.createURI(litdupsNS, "loadPerc"); -// URI uri1 = vf.createURI(litdupsNS, "uri1"); -// URI uri2 = vf.createURI(litdupsNS, "uri2"); -// URI uri3 = vf.createURI(litdupsNS, "uri3"); -// URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1"); -// URI auth2 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "2"); -// URI auth3 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "3"); -// conn.add(cpu, loadPerc, uri1, auth1, auth2, auth3); -// conn.add(cpu, loadPerc, uri2, auth2, auth3); -// conn.add(cpu, loadPerc, uri3, auth3); -// conn.commit(); -// -// //query with no auth -// RepositoryResult result = conn.getStatements(cpu, loadPerc, null, true); -// int count = 0; -// while (result.hasNext()) { -// count++; -// result.next(); -// } -// assertEquals(0, count); -// result.close(); -// -// String query = "select * where {" + -// "<" + cpu.toString() + "> ?p ?o1." + -// "}"; -// TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2")); -// CountTupleHandler cth = new CountTupleHandler(); -// tupleQuery.evaluate(cth); -// assertEquals(2, cth.getCount()); -// -// conn.close(); -// } - - public void testEvaluate() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - conn.add(cpu, loadPerc, uri1); - conn.commit(); - - String query = "select * where {" + - "?x <" + loadPerc.stringValue() + "> ?o1." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - assertEquals(cth.getCount(), 1); - conn.close(); - } - - public void testEvaluateMultiLine() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - URI pred2 = vf.createURI(litdupsNS, "pred2"); - URI uri2 = vf.createURI(litdupsNS, "uri2"); - conn.add(cpu, loadPerc, uri1); - conn.add(cpu, pred2, uri2); - conn.commit(); - - String query = "select * where {" + - "?x <" + loadPerc.stringValue() + "> ?o1." + - "?x <" + pred2.stringValue() + "> ?o2." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true)); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 1); - } - - public void testPOObjRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc, sev); - conn.add(cpu, loadPerc, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "?x <" + loadPerc.stringValue() + "> ?o.\n" + - "FILTER(mvm:range(?o, '6', '8'))." 
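-                // mvm:range appears to be a Rya-specific SPARQL filter
-                // function resolved in the store's NAMESPACE prefix, not a
-                // standard builtin; these tests treat it as an inclusive
-                // lexical range over the bound value. Because the comparison
-                // is lexical, '6'..'8' keeps the literals "6" and "7" but not
-                // "10" ("10" sorts before "6" as a string), which is why the
-                // assert below expects 2 rows.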
+ - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(2, cth.getCount()); - } - - public void testPOPredRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc1"); - URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2"); - URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3"); - URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc2, sev); - conn.add(cpu, loadPerc4, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "?x ?p ?o.\n" + - "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 2); - } - - public void testSPOPredRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc1"); - URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2"); - URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3"); - URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc2, sev); - conn.add(cpu, loadPerc4, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "<" + cpu.stringValue() + "> ?p ?o.\n" + - "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(2, cth.getCount()); - } - - public void testSPOSubjRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI cpu2 = vf.createURI(litdupsNS, "cpu2"); - URI cpu3 = vf.createURI(litdupsNS, "cpu3"); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu2, loadPerc, sev); - conn.add(cpu3, loadPerc, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "?s ?p ?o.\n" + - "FILTER(mvm:range(?s, <" + cpu.stringValue() + ">, <" + cpu2.stringValue() + ">))." 
+ - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 2); - } - - public void testSPOObjRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc, sev); - conn.add(cpu, loadPerc, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n" + - "FILTER(mvm:range(?o, '6', '8'))." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 2); - } - - public void testOSPObjRange() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc, sev); - conn.add(cpu, loadPerc, ten); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - "?s ?p ?o.\n" + - "FILTER(mvm:range(?o, '6', '8'))." + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 2); - } - - public void testRegexFilter() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI testClass = vf.createURI(litdupsNS, "test"); - Literal six = vf.createLiteral("6"); - Literal sev = vf.createLiteral("7"); - Literal ten = vf.createLiteral("10"); - conn.add(cpu, loadPerc, six); - conn.add(cpu, loadPerc, sev); - conn.add(cpu, loadPerc, ten); - conn.add(cpu, RDF.TYPE, testClass); - conn.commit(); - - String query = "PREFIX mvm: <" + NAMESPACE + ">\n" + - "select * where {" + - String.format("<%s> ?p ?o.\n", cpu.stringValue()) + - "FILTER(regex(?o, '^1'))." 
+ - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler cth = new CountTupleHandler(); - tupleQuery.evaluate(cth); - conn.close(); - assertEquals(cth.getCount(), 1); - } - - public void testMMRTS152() throws Exception { - RepositoryConnection conn = repository.getConnection(); - URI loadPerc = vf.createURI(litdupsNS, "testPred"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - conn.add(cpu, loadPerc, uri1); - conn.commit(); - - RepositoryResult result = conn.getStatements(cpu, loadPerc, null, false, new Resource[0]); -// RdfCloudTripleStoreCollectionStatementsIterator iterator = new RdfCloudTripleStoreCollectionStatementsIterator( -// cpu, loadPerc, null, store.connector, -// vf, new Configuration(), null); - - while (result.hasNext()) { - assertTrue(result.hasNext()); - assertNotNull(result.next()); - } - - conn.close(); - } - - public void testDuplicateLiterals() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - Literal lit1 = vf.createLiteral(0.0); - Literal lit2 = vf.createLiteral(0.0); - Literal lit3 = vf.createLiteral(0.0); - - conn.add(cpu, loadPerc, lit1); - conn.add(cpu, loadPerc, lit2); - conn.add(cpu, loadPerc, lit3); - conn.commit(); - - RepositoryResult result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]); - int count = 0; - while (result.hasNext()) { - count++; - result.next(); - } - result.close(); - assertEquals(1, count); - - //clean up - conn.remove(cpu, loadPerc, lit1); - conn.close(); - } - - public void testNotDuplicateUris() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - URI uri1 = vf.createURI(litdupsNS, "uri1"); - URI uri2 = vf.createURI(litdupsNS, "uri1"); - URI uri3 = vf.createURI(litdupsNS, "uri1"); - - conn.add(cpu, loadPerc, uri1); - conn.add(cpu, loadPerc, uri2); - conn.add(cpu, loadPerc, uri3); - conn.commit(); - - RepositoryResult result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]); - int count = 0; - while (result.hasNext()) { - count++; - result.next(); - } - result.close(); - assertEquals(1, count); - - //clean up - conn.remove(cpu, loadPerc, uri1); - conn.close(); - } - - public void testNamespaceUsage() throws Exception { - RepositoryConnection conn = repository.getConnection(); - conn.setNamespace("lit", litdupsNS); - URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); - final URI uri1 = vf.createURI(litdupsNS, "uri1"); - conn.add(cpu, loadPerc, uri1); - conn.commit(); - - String query = "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {lit:cpu lit:loadPerc ?o.}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.evaluate(new TupleQueryResultHandler() { - - @Override - public void startQueryResult(List strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - assertTrue(uri1.toString().equals(bindingSet.getBinding("o").getValue().stringValue())); - } - - @Override - public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List paramList) throws QueryResultHandlerException { - } - }); - conn.close(); - } - - public void testSubPropertyOf() throws Exception { - 
if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "memberOf"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "associatedWith"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "gradDegreeFrom"), vf.createURI(litdupsNS, "Yale"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "ProfessorC"), vf.createURI(litdupsNS, "memberOf"), vf.createURI(litdupsNS, "Harvard"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:degreeFrom lit:Harvard.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:memberOf lit:Harvard.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:associatedWith ?o.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:gradDegreeFrom lit:Yale.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - conn.close(); - } - - public void testEquivPropOf() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, vf.createURI(litdupsNS, "ugradDegreeFrom"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "ugradDegreeFrom"), vf.createURI(litdupsNS, "Harvard"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradC"), vf.createURI(litdupsNS, "ugraduateDegreeFrom"), 
vf.createURI(litdupsNS, "Harvard"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:ugradDegreeFrom lit:Harvard.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - conn.close(); - } - - public void testSymmPropOf() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY)); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "Bob"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "James"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:friendOf lit:Bob.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:friendOf lit:James.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:friendOf lit:Jeff.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - conn.close(); - } - - public void testTransitiveProp() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY)); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "Queens"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NYC"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "NYC"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NY"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "NY"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "US"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "US"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NorthAmerica"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "NorthAmerica"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "World"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = 
repository.getConnection(); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:subRegionOf lit:NorthAmerica.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(4, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:subRegionOf lit:NY.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {lit:Queens lit:subRegionOf ?s.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(5, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {lit:NY lit:subRegionOf ?s.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - conn.close(); - } - - public void testInverseOf() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {lit:Harvard lit:hasAlumnus ?s.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s lit:degreeFrom lit:Harvard.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - conn.close(); - } - - public void testSubClassOf() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, 
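-        // The adds around this point load the hierarchy
-        // UndergraduateStudent rdfs:subClassOf Student rdfs:subClassOf Person,
-        // so rdf:type queries roll up the chain: 3 Persons (PersonC, plus
-        // StudentB and UgradA via inference), 2 Students, and 1
-        // UndergraduateStudent, matching both the getStatements count and the
-        // three SPARQL asserts below.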
"UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Student"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Person"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), RDF.TYPE, vf.createURI(litdupsNS, "UndergraduateStudent"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB"), RDF.TYPE, vf.createURI(litdupsNS, "Student"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "PersonC"), RDF.TYPE, vf.createURI(litdupsNS, "Person"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - //simple api first - RepositoryResult person = conn.getStatements(null, RDF.TYPE, vf.createURI(litdupsNS, "Person"), true); - int count = 0; - while (person.hasNext()) { - count++; - person.next(); - } - person.close(); - assertEquals(3, count); - - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s rdf:type lit:Person.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s rdf:type lit:Student.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select * where {?s rdf:type lit:UndergraduateStudent.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - conn.close(); - } - - public void testSameAs() throws Exception { - if(internalInferenceEngine == null) return; //infer not supported; - - RepositoryConnection conn = repository.getConnection(); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA2"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA3"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB2"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB3"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), vf.createURI(litdupsNS, "pred1"), vf.createURI(litdupsNS, "StudentB3"))); - conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), vf.createURI(litdupsNS, "pred2"), vf.createURI(litdupsNS, "StudentA3"))); - conn.commit(); - conn.close(); - - internalInferenceEngine.refreshGraph(); - - conn = repository.getConnection(); - - // query where finds sameAs for obj, pred specified - String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select ?s where {?s lit:pred1 lit:StudentB2.}"; - - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new 
CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - // query where finds sameAs for obj only specified - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select ?s where {?s ?p lit:StudentB2.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); // including sameAs assertions - - // query where finds sameAs for subj, pred specified - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select ?s where {lit:StudentB2 lit:pred2 ?s.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); // including sameAs assertions - - // query where finds sameAs for subj only specified - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select ?s where {lit:StudentB2 ?p ?s.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); // including sameAs assertions - - // query where finds sameAs for subj, obj specified - query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + - "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" + - "PREFIX lit: <" + litdupsNS + ">\n" + - "select ?s where {lit:StudentB2 ?s lit:StudentA2.}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - conn.close(); - } - - public void testNamedGraphLoad() throws Exception { - InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig"); - assertNotNull(stream); - RepositoryConnection conn = repository.getConnection(); - conn.add(stream, "", RDFFormat.TRIG); - conn.commit(); - - String query = "PREFIX ex: \n" + - "PREFIX voc: \n" + - "PREFIX foaf: \n" + - "PREFIX rdfs: \n" + - "\n" + - "SELECT * \n" + -// "FROM NAMED \n" + - "WHERE\n" + - "{\n" + - " GRAPH ex:G1\n" + - " {\n" + - " ?m voc:name ?name ;\n" + - " voc:homepage ?hp .\n" + - " } .\n" + - " GRAPH ex:G2\n" + - " {\n" + - " ?m voc:hasSkill ?skill .\n" + - " } .\n" + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); -// tupleQuery.evaluate(new PrintTupleHandler()); - assertEquals(1, tupleHandler.getCount()); - - query = "PREFIX ex: \n" + - "PREFIX voc: \n" + - "PREFIX swp: \n" + - "PREFIX foaf: \n" + - "PREFIX rdfs: \n" + - "\n" + - "SELECT * \n" + - "WHERE\n" + - "{\n" + - " GRAPH ex:G3\n" + - " {\n" + - " ?g swp:assertedBy ?w .\n" + - " ?w swp:authority ex:Tom .\n" + - " } .\n" + - " GRAPH ?g\n" + - " {\n" + - " ?m voc:name ?name .\n" + - " } .\n" + - "}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - query = "PREFIX ex: \n" + - "PREFIX voc: \n" + - "PREFIX swp: \n" + - "PREFIX foaf: \n" + - "PREFIX 
rdfs: \n" + - "\n" + - "SELECT * \n" + - "WHERE\n" + - "{\n" + - " GRAPH ?g\n" + - " {\n" + - " ?m voc:name ?name .\n" + - " } .\n" + - "}"; - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(BINDING_DISP_QUERYPLAN, VALUE_FACTORY.createLiteral(true)); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - conn.close(); - } - - public void testNamedGraphLoad2() throws Exception { - InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig"); - assertNotNull(stream); - RepositoryConnection conn = repository.getConnection(); - conn.add(stream, "", RDFFormat.TRIG); - conn.commit(); - - RepositoryResult statements = conn.getStatements(null, vf.createURI("http://www.example.org/vocabulary#name"), null, true, vf.createURI("http://www.example.org/exampleDocument#G1")); - int count = 0; - while (statements.hasNext()) { - statements.next(); - count++; - } - statements.close(); - assertEquals(1, count); - - conn.close(); - } - -// public void testNamedGraphLoadWInlineAuth() throws Exception { -// InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig"); -// assertNotNull(stream); -// URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1"); -// RepositoryConnection conn = repository.getConnection(); -// conn.add(stream, "", RDFFormat.TRIG, auth1); -// conn.commit(); -// -// String query = "PREFIX ex: \n" + -// "PREFIX voc: \n" + -// "PREFIX foaf: \n" + -// "PREFIX rdfs: \n" + -// "\n" + -// "SELECT * \n" + -// "WHERE\n" + -// "{\n" + -// " GRAPH ex:G1\n" + -// " {\n" + -// " ?m voc:name ?name ;\n" + -// " voc:homepage ?hp .\n" + -// " } .\n" + -// " GRAPH ex:G2\n" + -// " {\n" + -// " ?m voc:hasSkill ?skill .\n" + -// " } .\n" + -// "}"; -// TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("1")); -// CountTupleHandler tupleHandler = new CountTupleHandler(); -// tupleQuery.evaluate(tupleHandler); -// assertEquals(1, tupleHandler.getCount()); -// -// query = "PREFIX ex: \n" + -// "PREFIX voc: \n" + -// "PREFIX swp: \n" + -// "PREFIX foaf: \n" + -// "PREFIX rdfs: \n" + -// "\n" + -// "SELECT * \n" + -// "WHERE\n" + -// "{\n" + -// " GRAPH ex:G3\n" + -// " {\n" + -// " ?g swp:assertedBy ?w .\n" + -// " ?w swp:authority ex:Tom .\n" + -// " } .\n" + -// " GRAPH ?g\n" + -// " {\n" + -// " ?m voc:name ?name .\n" + -// " } .\n" + -// "}"; -// -// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleHandler = new CountTupleHandler(); -// tupleQuery.evaluate(tupleHandler); -// assertEquals(0, tupleHandler.getCount()); -// -// conn.close(); -// } - - public void testNamedGraphLoadWAuth() throws Exception { - InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig"); - assertNotNull(stream); - - RdfCloudTripleStore tstore = new MockRdfCloudStore(); - NamespaceManager nm = new NamespaceManager(tstore.getRyaDAO(), tstore.getConf()); - tstore.setNamespaceManager(nm); - SailRepository repo = new SailRepository(tstore); - tstore.getRyaDAO().getConf().setCv("1|2"); - repo.initialize(); - - RepositoryConnection conn = repo.getConnection(); - conn.add(stream, "", RDFFormat.TRIG); - conn.commit(); - - String query = "PREFIX ex: \n" + - "PREFIX voc: \n" + - "PREFIX foaf: \n" + - "PREFIX rdfs: \n" + - 
"\n" + - "SELECT * \n" + -// "FROM NAMED \n" + - "WHERE\n" + - "{\n" + - " GRAPH ex:G1\n" + - " {\n" + - " ?m voc:name ?name ;\n" + - " voc:homepage ?hp .\n" + - " } .\n" + - " GRAPH ex:G2\n" + - " {\n" + - " ?m voc:hasSkill ?skill .\n" + - " } .\n" + - "}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2")); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); //no auth - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(0, tupleHandler.getCount()); - - conn.close(); - - repo.shutDown(); - } - - public void testInsertDeleteData() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - String insert = "PREFIX dc: \n" + - "INSERT DATA\n" + - "{ dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}"; - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - String query = "PREFIX dc: \n" + - "select * where { ?p ?o. }"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - String delete = "PREFIX dc: \n" + - "\n" + - "DELETE DATA\n" + - "{ dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}"; - update = conn.prepareUpdate(QueryLanguage.SPARQL, delete); - update.execute(); - - query = "PREFIX dc: \n" + - "select * where { ?p ?o. }"; - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(0, tupleHandler.getCount()); - - conn.close(); - } - - public void testUpdateData() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - String insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G1 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - String query = "PREFIX dc: \n" + - "select * where { ?p ?o. }"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - String insdel = "PREFIX dc: \n" + - "\n" + - "WITH \n" + - "DELETE { ?book dc:title ?title }\n" + - "INSERT { ?book dc:title \"A newer book\"." + - " ?book dc:add \"Additional Info\" }\n" + - "WHERE\n" + - " { ?book dc:creator \"A.N.Other\" ;\n" + - " dc:title ?title .\n" + - " }"; - update = conn.prepareUpdate(QueryLanguage.SPARQL, insdel); - update.execute(); - - query = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "select * where { GRAPH ex:G1 { ?p ?o. 
} }"; - tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - - conn.close(); - } - - public void testClearGraph() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - String insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G1 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G2 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - String query = "PREFIX dc: \n" + - "select * where { ?p ?o. }"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(4, tupleHandler.getCount()); - - tupleHandler = new CountTupleHandler(); - conn.clear(new URIImpl("http://example/addresses#G2")); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - tupleHandler = new CountTupleHandler(); - conn.clear(new URIImpl("http://example/addresses#G1")); - tupleQuery.evaluate(tupleHandler); - assertEquals(0, tupleHandler.getCount()); - - conn.close(); - } - - public void testClearAllGraph() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - String insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G1 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G2 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - String query = "PREFIX dc: \n" + - "select * where { ?p ?o. }"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(4, tupleHandler.getCount()); - - tupleHandler = new CountTupleHandler(); - conn.clear(); - tupleQuery.evaluate(tupleHandler); - assertEquals(0, tupleHandler.getCount()); - - conn.close(); - } - - public void testDropGraph() throws Exception { - RepositoryConnection conn = repository.getConnection(); - - String insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G1 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - insert = "PREFIX dc: \n" + - "PREFIX ex: \n" + - "INSERT DATA\n" + - "{ GRAPH ex:G2 {\n" + - " dc:title \"A new book\" ;\n" + - " dc:creator \"A.N.Other\" .\n" + - "}\n" + - "}"; - update = conn.prepareUpdate(QueryLanguage.SPARQL, insert); - update.execute(); - - String query = "PREFIX dc: \n" + - "select * where { ?p ?o. 
}"; - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(4, tupleHandler.getCount()); - - tupleHandler = new CountTupleHandler(); - String drop = "PREFIX ex: \n" + - "DROP GRAPH ex:G2 "; - update = conn.prepareUpdate(QueryLanguage.SPARQL, drop); - update.execute(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - - tupleHandler = new CountTupleHandler(); - drop = "PREFIX ex: \n" + - "DROP GRAPH ex:G1 "; - update = conn.prepareUpdate(QueryLanguage.SPARQL, drop); - update.execute(); - tupleQuery.evaluate(tupleHandler); - assertEquals(0, tupleHandler.getCount()); - - conn.close(); - } - - public static class CountTupleHandler implements TupleQueryResultHandler { - - int count = 0; - - @Override - public void startQueryResult(List strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - count++; - } - - public int getCount() { - return count; - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - } - } - - private static class PrintTupleHandler implements TupleQueryResultHandler { - - - @Override - public void startQueryResult(List strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - System.out.println(bindingSet); - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - } - } - - public class MockRdfCloudStore extends RdfCloudTripleStore { - - public MockRdfCloudStore() { - super(); - Instance instance = new MockInstance(); - try { - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - setConf(conf); - Connector connector = instance.getConnector("", ""); - AccumuloRyaDAO cdao = new AccumuloRyaDAO(); - cdao.setConf(conf); - cdao.setConnector(connector); - setRyaDAO(cdao); - inferenceEngine = new InferenceEngine(); - inferenceEngine.setRyaDAO(cdao); - inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec - inferenceEngine.setConf(conf); - setInferenceEngine(inferenceEngine); - internalInferenceEngine = inferenceEngine; - } catch (Exception e) { - e.printStackTrace(); - } - } - } -} diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java deleted file mode 100644 index eee6bcefe..000000000 --- a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java +++ /dev/null @@ -1,699 +0,0 @@ -package mvm.rya; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.openrdf.model.Namespace; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.query.*; -import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.RepositoryResult; -import org.openrdf.repository.sail.SailRepository; -import org.openrdf.repository.sail.SailRepositoryConnection; - -import javax.xml.datatype.DatatypeConfigurationException; -import javax.xml.datatype.DatatypeFactory; -import java.util.GregorianCalendar; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Class PartitionConnectionTest - * Date: Jul 6, 2011 - * Time: 5:24:07 PM - */ -public class RdfCloudTripleStoreTest extends TestCase { - public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len - public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; - public static final String HBNAMESPACE = "http://here/2010/tracked-data-provenance/heartbeat/ns#"; - public static final String HB_TIMESTAMP = HBNAMESPACE + "timestamp"; - - private SailRepository repository; - private SailRepositoryConnection connection; - - ValueFactory vf = ValueFactoryImpl.getInstance(); - - private String objectUuid = "objectuuid1"; - private String ancestor = "ancestor1"; - private String descendant = "descendant1"; - private static final long START = 1309532965000l; - private static final long END = 1310566686000l; - private Connector connector; - - @Override - protected void setUp() throws Exception { - super.setUp(); - connector = new MockInstance().getConnector("", ""); - - RdfCloudTripleStore sail = new RdfCloudTripleStore(); - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - conf.setTablePrefix("lubm_"); - sail.setConf(conf); - AccumuloRyaDAO crdfdao = new AccumuloRyaDAO(); - crdfdao.setConnector(connector); - crdfdao.setConf(conf); - sail.setRyaDAO(crdfdao); - - repository = new SailRepository(sail); - repository.initialize(); - connection = repository.getConnection(); - - loadData(); - } - - private void loadData() throws RepositoryException, DatatypeConfigurationException { - connection.add(new StatementImpl(vf.createURI(NAMESPACE, objectUuid), vf.createURI(NAMESPACE, "name"), vf.createLiteral("objUuid"))); - //created - String uuid = "uuid1"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), 
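-        // Every provenance event in loadData gets performedAt/reportedAt
-        // xsd:dateTime literals built through
-        // javax.xml.datatype.DatatypeFactory; a hedged sketch of one such
-        // literal, using the vf field above:
-        //
-        //     Literal performedAt = vf.createLiteral(DatatypeFactory.newInstance()
-        //             .newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0));
-        //
-        // The int-based factory overload takes (year, month, day, hour,
-        // minute, second, millisecond, timezone-offset-in-minutes), so the
-        // trailing 0 pins every timestamp to UTC, e.g. 2011-07-12T06:00:00Z.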
vf.createURI("urn:system:A"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit3"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit4"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit1"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit2"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit3"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0)))); - //clicked - uuid = "uuid2"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0)))); - //deleted - uuid = "uuid3"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Deleted"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "deletedItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:C"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 4, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 5, 0, 0, 0)))); - //dropped - uuid = "uuid4"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Dropped"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "droppedItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), 
vf.createURI("urn:system:D"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 6, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 7, 0, 0, 0)))); - //received - uuid = "uuid5"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Received"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "receivedItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:E"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 8, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 9, 0, 0, 0)))); - //sent - uuid = "uuid6"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Sent"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "sentItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:F"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 10, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 11, 0, 0, 0)))); - //stored - uuid = "uuid7"; - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Stored"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "storedItem"), vf.createURI(NAMESPACE, objectUuid))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:G"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 12, 0, 0, 0)))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 13, 0, 0, 0)))); - - //derivedFrom - connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "derivedFrom"), vf.createURI(NAMESPACE, ancestor))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "name"), vf.createLiteral("descendantOne"))); - connection.add(new StatementImpl(vf.createURI(NAMESPACE, ancestor), vf.createURI(NAMESPACE, "name"), vf.createLiteral("ancestor1"))); - - //heartbeats - String hbuuid = "hbuuid1"; - connection.add(new 
StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement"))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 1) + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(1 + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:A"))); - connection.add(new StatementImpl(vf.createURI("urn:system:A"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid))); - - hbuuid = "hbuuid2"; - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement"))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 2) + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(2 + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:B"))); - connection.add(new StatementImpl(vf.createURI("urn:system:B"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid))); - - hbuuid = "hbuuid3"; - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement"))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 3) + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(3 + ""))); - connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:C"))); - connection.add(new StatementImpl(vf.createURI("urn:system:C"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid))); - - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj1"))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj2"))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj3"))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj4"))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj1"))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj2"))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj3"))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj4"))); - connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj1"))); - connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj4"))); - - //Foreign Chars - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), 
vf.createLiteral(FAN_CH_TRAD))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH))); - connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH))); - connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN))); - connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM))); - connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD))); - - connection.commit(); - } - - private static final String FAN_CH_SIM = "风扇"; - private static final String FAN_CH_TRAD = "風扇"; - private static final String FAN_TH = "แฟน"; - private static final String FAN_RN = "вентилятор"; - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - connection.close(); - repository.shutDown(); - } - - protected String getXmlDate(long ts) throws DatatypeConfigurationException { - GregorianCalendar gregorianCalendar = new GregorianCalendar(); - gregorianCalendar.setTimeInMillis(ts); - //"2011-07-12T05:12:00.000Z"^^xsd:dateTime - return "\"" + vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar)).stringValue() + "\"^^xsd:dateTime"; - } - -// public void testScanAll() throws Exception { -// Scanner sc = connector.createScanner("lubm_spo", Constants.NO_AUTHS); -// for (Map.Entry aSc : sc) System.out.println(aSc.getKey().getRow()); -// } - - public void testNamespace() throws Exception { - String namespace = "urn:testNamespace#"; - String prefix = "pfx"; - connection.setNamespace(prefix, namespace); - - assertEquals(namespace, connection.getNamespace(prefix)); - } - - public void testValues() throws Exception { - String query = "SELECT DISTINCT ?entity WHERE {" - + "VALUES (?entity) { () }" - + "}"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testGetNamespaces() throws Exception { - String namespace = "urn:testNamespace#"; - String prefix = "pfx"; - connection.setNamespace(prefix, namespace); - - namespace = "urn:testNamespace2#"; - prefix = "pfx2"; - connection.setNamespace(prefix, namespace); - - RepositoryResult result = connection.getNamespaces(); - int count = 0; - while (result.hasNext()) { - result.next(); - count++; - } - - assertEquals(2, count); - } - - public void testAddCommitStatement() throws Exception { - StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object")); - connection.add(stmt); - connection.commit(); - } - - public void testSelectOnlyQuery() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - "ns:uuid1 ns:createdItem ?cr.\n" + - "ns:uuid1 ns:reportedAt ?ra.\n" + - "ns:uuid1 ns:performedAt ?pa.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); 
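The tests in this deleted file all run against one fixture: a MockInstance-backed AccumuloRyaDAO wrapped in an RdfCloudTripleStore and exposed through a Sesame SailRepository, with solutions counted by a TupleQueryResultHandler. A minimal sketch of that pattern, assuming the same openrdf and Accumulo mock APIs this file imports (the table prefix, statement, and query below are illustrative, not taken from the original tests):

    // Inside a test method declared "throws Exception".
    RdfCloudTripleStore sail = new RdfCloudTripleStore();
    AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
    conf.setTablePrefix("lubm_");                              // backing tables: lubm_spo, lubm_po, lubm_osp
    sail.setConf(conf);
    AccumuloRyaDAO dao = new AccumuloRyaDAO();
    dao.setConnector(new MockInstance().getConnector("", "")); // in-memory Accumulo, no cluster required
    dao.setConf(conf);
    sail.setRyaDAO(dao);

    SailRepository repo = new SailRepository(sail);
    repo.initialize();
    SailRepositoryConnection conn = repo.getConnection();

    ValueFactory vf = ValueFactoryImpl.getInstance();
    conn.add(new StatementImpl(vf.createURI("urn:subj"), vf.createURI("urn:pred"), vf.createLiteral("obj")));
    conn.commit();

    TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, "select * where { ?s ?p ?o }");
    CountTupleHandler handler = new CountTupleHandler();       // the handler class defined later in this file
    q.evaluate(handler);
    assertEquals(1, handler.getCount());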
-// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testForeignSelectOnlyQuery() throws Exception { - String query; - query = "select * where { ?s ?o }"; // hits po - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(20, tupleHandler.getCount()); - - query = "select * where { ?o }"; //hits spo - tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(8, tupleHandler.getCount()); - - query = "select * where { ?s ?p '"+FAN_CH_SIM+"' }"; //hits osp - tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); -} - - - - //provenance Queries////////////////////////////////////////////////////////////////////// - - public void testEventInfo() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - " ns:uuid1 ?p ?o.\n" + - "}\n"; - - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(12, tupleHandler.getCount()); - } - - public void testAllAncestors() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - "ns:" + descendant + " ns:derivedFrom ?dr.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - // tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testAllDescendants() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - "?ds ns:derivedFrom ns:" + ancestor + ".\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testEventsForUri() throws Exception { - String query = "PREFIX rdf: \n" + - "PREFIX ns:<" + NAMESPACE + ">\n" + - "PREFIX mvmpart: \n" + - "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" + - "PREFIX xsd: \n" + - "select * where {\n" + - "{" + - " ?s rdf:type ns:Created.\n" + - " ?s ns:createdItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Clicked.\n" + - " ?s ns:clickedItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Deleted.\n" + - " ?s ns:deletedItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - 
"}\n" + - "UNION {" + - " ?s rdf:type ns:Dropped.\n" + - " ?s ns:droppedItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Received.\n" + - " ?s ns:receivedItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Stored.\n" + - " ?s ns:storedItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Sent.\n" + - " ?s ns:sentItem ns:objectuuid1.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(START_BINDING, vf.createLiteral(START)); -// tupleQuery.setBinding(END_BINDING, vf.createLiteral(END)); -// tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt")); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(7, tupleHandler.getCount()); - } - - public void testAllEvents() throws Exception { - String query = "PREFIX rdf: \n" + - "PREFIX ns:<" + NAMESPACE + ">\n" + - "PREFIX mvmpart: \n" + - "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" + - "PREFIX xsd: \n" + - "select * where {\n" + - "{" + - " ?s rdf:type ns:Created.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Clicked.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Deleted.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Dropped.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Received.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Stored.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "UNION {" + - " ?s rdf:type ns:Sent.\n" + - " ?s ns:performedBy ?pb.\n" + - " ?s ns:performedAt ?pa.\n" + - " FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(START_BINDING, vf.createLiteral(START)); -// tupleQuery.setBinding(END_BINDING, vf.createLiteral(END)); -// tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt")); -// tupleQuery.evaluate(new PrintTupleHandler()); - 
CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(7, tupleHandler.getCount()); -// System.out.println(tupleHandler.getCount()); - } - - public void testEventsBtwnSystems() throws Exception { //TODO: How to do XMLDateTime ranges - String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" + - "PREFIX ns:<" + NAMESPACE + ">\n" + - "PREFIX mvmpart: \n" + - "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" + - "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" + - "select * where {\n" + - " ?sendEvent rdf:type ns:Sent;\n" + - " ns:sentItem ?objUuid;\n" + - " ns:performedBy <urn:system:F>;\n" + - " ns:performedAt ?spa.\n" + - " ?recEvent rdf:type ns:Received;\n" + - " ns:receivedItem ?objUuid;\n" + - " ns:performedBy <urn:system:E>;\n" + - " ns:performedAt ?rpa.\n" + -// " FILTER(mvm:range(?spa, \"2011-07-12T05:12:00.000Z\"^^xsd:dateTime, \"2011-07-12T07:12:00.000Z\"^^xsd:dateTime))\n" + - " FILTER(mvm:range(?spa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - " FILTER(mvm:range(?rpa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(START_BINDING, vf.createLiteral(START)); -// tupleQuery.setBinding(END_BINDING, vf.createLiteral(END)); -// tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt")); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testHeartbeatCounts() throws Exception { - String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" + - "PREFIX hns:<" + HBNAMESPACE + ">\n" + - "PREFIX mvmpart: \n" + - "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" + - "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" + - "select * where {\n" + - " ?hb rdf:type hns:HeartbeatMeasurement;\n" + - " hns:count ?count;\n" + - " hns:timestamp ?ts;\n" + - " hns:systemName ?systemName.\n" + - " FILTER(mvm:range(?ts, \"" + START + "\", \"" + (START + 3) + "\"))\n" + - "}\n"; -// System.out.println(query); - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, vf.createLiteral(true)); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - } - - //provenance Queries////////////////////////////////////////////////////////////////////// - - public void testCreatedEvents() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - " ?s ns:createdItem ns:objectuuid1.\n" + - " ?s ns:reportedAt ?ra.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testSelectAllAfterFilter() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - " ?s ns:createdItem ns:objectuuid1.\n" + - " ?s ?p ?o.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(12, tupleHandler.getCount()); - } - - public void testFilterQuery() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - "ns:uuid1 ns:createdItem ?cr.\n" + - "ns:uuid1 ns:stringLit ?sl.\n" + - "FILTER regex(?sl, \"stringLit1\")" + - "ns:uuid1 ns:reportedAt ?ra.\n" + - "ns:uuid1 ns:performedAt ?pa.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - // tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testMultiplePredicatesMultipleBindingSets() throws Exception { - //MMRTS-121 - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "select * where {\n" + - "?id ns:createdItem ns:objectuuid1.\n" + - "?id ns:stringLit ?sl.\n" + - "?id ns:strLit1 ?s2.\n" + - "}\n"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(12, tupleHandler.getCount()); - } - - public void testMultiShardLookupTimeRange() throws Exception { - //MMRTS-113 - String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" + - "PREFIX mvmpart: \n" + - "SELECT * WHERE\n" + - "{\n" + - "?id hb:timestamp ?timestamp.\n" + -// "FILTER(mvmpart:timeRange(?id, hb:timestamp, " + START + " , " + (START + 2) + " , 'TIMESTAMP'))\n" + - "?id hb:count ?count.\n" + - "?system hb:heartbeat ?id.\n" + - "}"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(3, tupleHandler.getCount()); - } - - public void testMultiShardLookupTimeRangeValueConst() throws Exception { - //MMRTS-113 - String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" + - "PREFIX mvmpart: \n" + - "SELECT * WHERE\n" + - "{\n" + - " hb:timestamp ?timestamp.\n" + -// "FILTER(mvmpart:timeRange(, hb:timestamp, " + START + " , " + END + " , 'TIMESTAMP'))\n" + - " hb:count ?count.\n" + - "?system hb:heartbeat .\n" + - "}"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testLinkQuery() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "SELECT * WHERE {\n" + - " ns:createdItem ?o .\n" + - " ?o ns:name ?n .\n" + - "}"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(1, tupleHandler.getCount()); - } - - public void testRangeOverDuplicateItems() throws Exception { - String query = "PREFIX ns:<" + NAMESPACE + ">\n" + - "SELECT * WHERE {\n" + - " ?subj <urn:pred> \"obj2\" .\n" + - "}"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); -// tupleQuery.evaluate(new PrintTupleHandler()); - CountTupleHandler tupleHandler = new CountTupleHandler(); - tupleQuery.evaluate(tupleHandler); - assertEquals(2, tupleHandler.getCount()); - } - - private static class PrintTupleHandler implements TupleQueryResultHandler { - - @Override - public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - System.out.println(bindingSet); - } - - @Override - public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List<String> paramList) throws QueryResultHandlerException { - } - } - - private static class CountTupleHandler implements TupleQueryResultHandler { - - int count = 0; - - @Override - public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - count++; - } - - public int getCount() { - return count; - } - - @Override - public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List<String> paramList) throws QueryResultHandlerException { - } - } - -} diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java deleted file mode 100644 index de49ef210..000000000 --- a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -//package mvm.rya; - -// -//import java.util.List; -// -//import junit.framework.TestCase; -// -//import org.openrdf.model.BNode; -//import org.openrdf.model.Resource; -//import org.openrdf.model.URI; -//import org.openrdf.model.Value; -//import org.openrdf.model.impl.ValueFactoryImpl; -// -//import com.google.common.io.ByteStreams; -// -//import static mvm.rya.api.RdfCloudTripleStoreUtils.*; -// -//public class RdfCloudTripleStoreUtilsTest extends TestCase { -// -// public void testWriteReadURI() throws Exception { -// final ValueFactoryImpl vf = new ValueFactoryImpl(); -// URI uri = vf.createURI("http://www.example.org/test/rel"); -// byte[] value = writeValue(uri); -// -// Value readValue = readValue(ByteStreams -// .newDataInput(value), vf); -// assertEquals(uri, readValue); -// } -// -// public void testWriteReadBNode() throws Exception { -// final ValueFactoryImpl vf = new ValueFactoryImpl(); -// Value val = vf.createBNode("bnodeid"); -// byte[] value = writeValue(val); -// -// Value readValue = readValue(ByteStreams -// .newDataInput(value), vf); -// assertEquals(val, readValue); -// } -// -// public void testWriteReadLiteral() throws Exception { -// final ValueFactoryImpl vf = new ValueFactoryImpl(); -// Value val = vf.createLiteral("myliteral"); -// byte[] value = writeValue(val); -// -// Value readValue = readValue(ByteStreams -// .newDataInput(value), vf); -// assertEquals(val, readValue); -// } -// -// public void testContexts() throws Exception { -// final ValueFactoryImpl vf = new ValueFactoryImpl(); -// BNode cont1 = vf.createBNode("cont1"); -// BNode cont2 = vf.createBNode("cont2"); -// BNode cont3 = vf.createBNode("cont3"); -// -// byte[] cont_bytes = writeContexts(cont1, cont2, -// cont3); -// final String cont = new String(cont_bytes); -// System.out.println(cont); -// -// List contexts = readContexts(cont_bytes, -// vf); -// for (Resource resource : contexts) { -// System.out.println(resource); -// } -// } -//} diff --git a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java deleted file mode 100644 index c97c8542d..000000000 --- a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java +++ /dev/null @@ -1,992 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; -import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer; -import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -public class QueryJoinSelectOptimizerTest { - - private static final String DELIM = "\u0000"; - private final byte[] EMPTY_BYTE = new byte[0]; - private final Value EMPTY_VAL = new Value(EMPTY_BYTE); - - private String q1 = ""// - + "SELECT ?h " // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + "}";// - - private String Q1 = ""// - + "SELECT ?h " // - + "{" // - + " ?h . "// - + " ?h ."// - + " ?h ."// - + "}";// - - private String q2 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " ?m . " // - + " ?m . " // - + "}";// - - private String Q2 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h . "// - + " ?h ."// - + " ?h ."// - + " ?m . " // - + " ?m . " // - + "}";// - - private String q3 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " {?m } OPTIONAL {?m }. " // - + " {?m . ?m .} UNION {?m }. " // - + " ?l ."// - + " ?l ."// - + " ?l ."// - + "}";// - - private String Q4 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?m . " // - + " ?m . " // - + " ?h ."// - + " ?h . "// - + "}";// - - private String q5 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " {?m . ?m .?m } " + " UNION {?m }. " // - + " ?l ."// - + " ?l ."// - + " ?l ."// - + "}";// - - - private String q6 = ""// - + "SELECT ?h ?l ?m" // - + "{" // - + " ?h ."// - + " ?h ."// - + " ?h . "// - + " FILTER(?l = ) ." // - + " {?m . ?m .?m } " + " UNION {?m }. 
" // - + " ?l ."// - + " ?l ."// - + " ?l ."// - + "}";// - - private Connector conn; - AccumuloRdfConfiguration arc; - BatchWriterConfig config; - RdfEvalStatsDAO res; - Instance mock; - - @Before - public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException { - - mock = new MockInstance("accumulo"); - PasswordToken pToken = new PasswordToken("pass".getBytes()); - conn = mock.getConnector("user", pToken); - - config = new BatchWriterConfig(); - config.setMaxMemory(1000); - config.setMaxLatency(1000, TimeUnit.SECONDS); - config.setMaxWriteThreads(10); - - if (conn.tableOperations().exists("rya_prospects")) { - conn.tableOperations().delete("rya_prospects"); - } - if (conn.tableOperations().exists("rya_selectivity")) { - conn.tableOperations().delete("rya_selectivity"); - } - - arc = new AccumuloRdfConfiguration(); - arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy()); - arc.setMaxRangesForScanner(300); - res = new ProspectorServiceEvalStatsDAO(conn, arc); - - } - - @Test - public void testOptimizeQ1() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("1".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - - scan = 
conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); - - } - - TupleExpr te = getTupleExpr(q1); - - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Originial query is " + te); - qjs.optimize(te, null, null); - Assert.assertTrue(te.equals(getTupleExpr(Q1))); - - } - - @Test - public void testOptimizeQ2() throws Exception { - - System.out.println("*********************QUERY2********************"); - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("4".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("5".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - 
m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m5); - mList2.add(m4); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - // scan = conn.createScanner("rya_selectivity" , new Authorizations()); - // scan.setRange(new Range()); - // - // for (Map.Entry entry : scan) { - // System.out.println("Key row string is " + entry.getKey().getRow().toString()); - // System.out.println("Key is " + entry.getKey()); - // System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); - // - // } - - TupleExpr te = getTupleExpr(q2); - System.out.println("Bindings are " + te.getBindingNames()); - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Originial query is " + te); - qjs.optimize(te, null, null); - System.out.println("Optimized query is " + te); - // System.out.println("Bindings are " + te.getBindingNames()); - Assert.assertTrue(te.equals(getTupleExpr(Q2))); - - } - - @Test - public void testOptimizeQ3() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt"; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("5".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("5".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); - m6.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - 
m7.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - int l = 5; - Long count1; - Long count2; - Long count3; - Long count4; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - count4 = (long) l; - m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - - i = 2 * i; - j = 2 * j; - k = 2 * k; - l = 2 * l; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m5); - mList2.add(m4); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - bw2.addMutations(mList2); - bw2.close(); - - scan = conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); - - } - - TupleExpr te = getTupleExpr(q3); - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Originial query is " + te); - qjs.optimize(te, null, null); - - System.out.print("Optimized query is " + te); - - } - - @Test - public void testOptimizeQ4() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - 
accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4, m5, m6; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("4".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("0".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("8".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("0".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m5); - mList2.add(m4); - mList2.add(m6); - bw2.addMutations(mList2); - bw2.close(); - - scan = conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); - - } - - TupleExpr te = getTupleExpr(q2); - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Originial query is " + te); - qjs.optimize(te, null, null); 
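Each of these optimizer tests repeats the same recipe: seed rya_prospects with a count per triple pattern and rya_selectivity with per-join-type cardinalities plus a FullTableCardinality row, parse the SPARQL into a TupleExpr, run QueryJoinSelectOptimizer over it, and compare the result against a hand-reordered version of the same query. A condensed sketch of that recipe, assuming the fields (conn, config, arc, res, accc, DELIM) and imports declared in this test; the row and count values below are illustrative:

    // One cardinality row in rya_prospects for the pattern "uri:peesOn uri:hydrant".
    BatchWriter bw = conn.createBatchWriter("rya_prospects", config);
    Mutation m = new Mutation("predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant" + DELIM + "1");
    m.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    bw.addMutation(m);
    bw.close();
    // rya_selectivity is seeded the same way: one mutation per pattern whose columns pair
    // each join type ("subjectobject", "predicateobject", ...) with a count, plus a single
    // "subjectpredicateobject" + DELIM + "FullTableCardinality" row holding the table size.

    // Parse, optimize, and compare against the expected join order.
    TupleExpr te = getTupleExpr(q1);  // this test's helper; assumed to wrap new SPARQLParser().parseQuery(q1, null)
    RdfCloudTripleStoreSelectivityEvaluationStatistics stats =
        new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
    new QueryJoinSelectOptimizer(stats, accc).optimize(te, null, null);
    Assert.assertEquals(getTupleExpr(Q1), te);  // patterns reordered cheapest (most selective) first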
- Assert.assertTrue(te.equals(getTupleExpr(Q4))); - - System.out.print("Optimized query is " + te); - - } - - @Test - public void testOptimizeQ5() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt"; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - - List mList = new ArrayList(); - List mList2 = new ArrayList(); - List sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("5".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("0".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); - m6.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - m7.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = 
new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - int l = 5; - Long count1; - Long count2; - Long count3; - Long count4; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - count4 = (long) l; - m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - - i = 2 * i; - j = 2 * j; - k = 2 * k; - l = 2 * l; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m5); - mList2.add(m4); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - bw2.addMutations(mList2); - bw2.close(); - - scan = conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry<Key, Value> entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + entry.getKey().getColumnQualifier().toString()); - - } - - TupleExpr te = getTupleExpr(q5); - System.out.println("Bindings are " + te.getBindingNames()); - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Original query is " + te); - qjs.optimize(te, null, null); - System.out.println("Bindings are " + te.getBindingNames()); - - System.out.print("Optimized query is " + te); - - } - - - - - - - - - @Test - public void testOptimizeQ6() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television"; - String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens"; - String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble"; - String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud"; - String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field"; - String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt"; - String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks"; - - List<Mutation> mList = new ArrayList<Mutation>(); - List<Mutation> mList2 = new ArrayList<Mutation>(); - List<String> sList = 
Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject"); - Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11; - - m1 = new Mutation(s1 + DELIM + "3"); - m1.put(new Text("count"), new Text(""), new Value("5".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m3 = new Mutation(s3 + DELIM + "1"); - m3.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m4 = new Mutation(s4 + DELIM + "1"); - m4.put(new Text("count"), new Text(""), new Value("0".getBytes())); - m5 = new Mutation(s5 + DELIM + "1"); - m5.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m6 = new Mutation(s6 + DELIM + "1"); - m6.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m7 = new Mutation(s7 + DELIM + "1"); - m7.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m8 = new Mutation(s8 + DELIM + "1"); - m8.put(new Text("count"), new Text(""), new Value("3".getBytes())); - m9 = new Mutation(s9 + DELIM + "1"); - m9.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m10 = new Mutation(s10 + DELIM + "1"); - m10.put(new Text("count"), new Text(""), new Value("1".getBytes())); - - mList.add(m1); - mList.add(m2); - mList.add(m3); - mList.add(m4); - mList.add(m5); - mList.add(m6); - mList.add(m7); - mList.add(m8); - mList.add(m9); - mList.add(m10); - - bw1.addMutations(mList); - bw1.close(); - - Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry<Key, Value> entry : scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + (new String(entry.getValue().get()))); - } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(s4); - m5 = new Mutation(s5); - m6 = new Mutation(s6); - m7 = new Mutation(s7); - m8 = new Mutation(s8); - m9 = new Mutation(s9); - m10 = new Mutation(s10); - m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - int l = 5; - Long count1; - Long count2; - Long count3; - Long count4; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - count4 = (long) l; - m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL); - m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - - i = 2 * i; - j = 2 * j; - k = 2 * k; - l = 2 * l; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m5); - mList2.add(m4); - mList2.add(m6); - mList2.add(m7); - mList2.add(m8); - mList2.add(m9); - mList2.add(m10); - mList2.add(m11); - bw2.addMutations(mList2); - bw2.close(); - - scan = conn.createScanner("rya_selectivity", new Authorizations()); - scan.setRange(new Range()); - - for (Map.Entry<Key, Value> entry 
: scan) { - System.out.println("Key row string is " + entry.getKey().getRow().toString()); - System.out.println("Key is " + entry.getKey()); - System.out.println("Value is " + entry.getKey().getColumnQualifier().toString()); - - } - - TupleExpr te = getTupleExpr(q6); - TupleExpr te2 = (TupleExpr) te.clone(); - System.out.println("Bindings are " + te.getBindingNames()); - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc); - System.out.println("Original query is " + te); - qjs.optimize(te, null, null); - - - - FilterOptimizer fo = new FilterOptimizer(); - fo.optimize(te2, null, null); - System.out.print("filter optimized query before js opt is " + te2); - qjs.optimize(te2, null, null); - - System.out.println("join selectivity opt query before filter opt is " + te); - fo.optimize(te, null, null); - - System.out.println("join selectivity opt query is " + te); - System.out.print("filter optimized query is " + te2); - - } - - - - - - - - - - - - - - - private TupleExpr getTupleExpr(String query) throws MalformedQueryException { - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(query, null); - - return pq.getTupleExpr(); - } - -} diff --git a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java deleted file mode 100644 index c5f56cfba..000000000 --- a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java +++ /dev/null @@ -1,304 +0,0 @@ -package mvm.rya.rdftriplestore.evaluation; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.layout.TablePrefixLayoutStrategy; -import mvm.rya.api.persist.RdfEvalStatsDAO; -import mvm.rya.joinselect.AccumuloSelectivityEvalDAO; -import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO; - -import org.apache.accumulo.core.client.AccumuloException; -import org.apache.accumulo.core.client.AccumuloSecurityException; -import org.apache.accumulo.core.client.BatchWriter; -import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Instance; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableExistsException; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Mutation; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.sparql.SPARQLParser; - -public class RdfCloudTripleStoreSelectivityEvaluationStatisticsTest { - - // TODO fix table names!!! - - private static final String DELIM = "\u0000"; - private final byte[] EMPTY_BYTE = new byte[0]; - private final Value EMPTY_VAL = new Value(EMPTY_BYTE); - - private String q1 = ""// - + "SELECT ?h " // - + "{" // - + " ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."// - + " ?h <uri:barksAt> <uri:cat> ."// - + " ?h <uri:peesOn> <uri:hydrant> . 
"// - + "}";// - - private Connector conn; - AccumuloRdfConfiguration arc; - BatchWriterConfig config; - Instance mock; - - @Before - public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException { - - mock = new MockInstance("accumulo"); - PasswordToken pToken = new PasswordToken("pass".getBytes()); - conn = mock.getConnector("user", pToken); - - config = new BatchWriterConfig(); - config.setMaxMemory(1000); - config.setMaxLatency(1000, TimeUnit.SECONDS); - config.setMaxWriteThreads(10); - - if (conn.tableOperations().exists("rya_prospects")) { - conn.tableOperations().delete("rya_prospects"); - } - if (conn.tableOperations().exists("rya_selectivity")) { - conn.tableOperations().delete("rya_selectivity"); - } - - arc = new AccumuloRdfConfiguration(); - arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy()); - arc.setMaxRangesForScanner(300); - - } - - @Test - public void testOptimizeQ1() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setRdfEvalDAO(res); - accc.setConnector(conn); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List<Mutation> mList = new ArrayList<Mutation>(); - List<Mutation> mList2 = new ArrayList<Mutation>(); - List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", - "predicatesubject"); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - m3 = new Mutation(s3 + DELIM + "3"); - m3.put(new Text("count"), new Text(""), new Value("3".getBytes())); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - -// Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); -// scan.setRange(new Range()); - -// for (Map.Entry<Key, Value> entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getValue().get()))); -// } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - -// scan = conn.createScanner("rya_selectivity", new Authorizations()); -// scan.setRange(new Range()); - -// for (Map.Entry<Key, Value> entry : scan) 
{ -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - TupleExpr te = getTupleExpr(q1); - System.out.println(te); - - RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - double card = ars.getCardinality(te); - - Assert.assertEquals(6.3136, card, .0001); - - } - - @Test - public void testOptimizeQ1ZeroCard() throws Exception { - - RdfEvalStatsDAO res = new ProspectorServiceEvalStatsDAO(conn, arc); - AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO(); - accc.setConf(arc); - accc.setConnector(conn); - accc.setRdfEvalDAO(res); - accc.init(); - - BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config); - BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config); - - String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog"; - String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat"; - String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant"; - List<Mutation> mList = new ArrayList<Mutation>(); - List<Mutation> mList2 = new ArrayList<Mutation>(); - List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", - "predicatesubject"); - Mutation m1, m2, m3, m4; - - m1 = new Mutation(s1 + DELIM + "1"); - m1.put(new Text("count"), new Text(""), new Value("1".getBytes())); - m2 = new Mutation(s2 + DELIM + "2"); - m2.put(new Text("count"), new Text(""), new Value("2".getBytes())); - // m3 = new Mutation(s3 + DELIM + "3"); - // m3.put(new Text("count"), new Text(""), new Value("3".getBytes())); - mList.add(m1); - mList.add(m2); - // mList.add(m3); - - bw1.addMutations(mList); - bw1.close(); - -// Scanner scan = conn.createScanner("rya_prospects", new Authorizations()); -// scan.setRange(new Range()); - -// for (Map.Entry<Key, Value> entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getValue().get()))); -// } - - m1 = new Mutation(s1); - m2 = new Mutation(s2); - m3 = new Mutation(s3); - m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality")); - m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL); - int i = 2; - int j = 3; - int k = 4; - Long count1; - Long count2; - Long count3; - - for (String s : sList) { - count1 = (long) i; - count2 = (long) j; - count3 = (long) k; - m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL); - m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL); - m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL); - i = 2 * i; - j = 2 * j; - k = 2 * k; - } - mList2.add(m1); - mList2.add(m2); - mList2.add(m3); - mList2.add(m4); - bw2.addMutations(mList2); - bw2.close(); - -// scan = conn.createScanner("rya_selectivity", new Authorizations()); -// scan.setRange(new Range()); - -// for (Map.Entry<Key, Value> entry : scan) { -// System.out.println("Key row string is " + entry.getKey().getRow().toString()); -// System.out.println("Key is " + entry.getKey()); -// System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString()))); -// -// } - - TupleExpr te = getTupleExpr(q1); - System.out.println(te); - - RdfCloudTripleStoreSelectivityEvaluationStatistics ars 
= new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc); - double card = ars.getCardinality(te); - - Assert.assertEquals(4.04, card, .0001); - - } - - private TupleExpr getTupleExpr(String query) throws MalformedQueryException { - - SPARQLParser sp = new SPARQLParser(); - ParsedQuery pq = sp.parseQuery(query, null); - - return pq.getTupleExpr(); - } - -} diff --git a/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java b/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java deleted file mode 100644 index d21412366..000000000 --- a/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java +++ /dev/null @@ -1,115 +0,0 @@ -package mvm.rya.triplestore.inference; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.Iterations; -import junit.framework.TestCase; -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.rdftriplestore.RdfCloudTripleStore; -import mvm.rya.rdftriplestore.inference.InferenceEngine; - -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.junit.Test; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.ValueFactoryImpl; - -public class SameAsTest extends TestCase { - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - private AccumuloRyaDAO ryaDAO; - private ValueFactory vf = new ValueFactoryImpl(); - private String namespace = "urn:test#"; - private AccumuloRdfConfiguration conf; - - @Override - public void setUp() throws Exception { - super.setUp(); - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - SecurityOperations secOps = 
connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - - conf = new AccumuloRdfConfiguration(); - ryaDAO = new AccumuloRyaDAO(); - ryaDAO.setConnector(connector); - conf.setTablePrefix(tablePrefix); - ryaDAO.setConf(conf); - ryaDAO.init(); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - } - - @Test - //This isn't a good test. It's simply a cut-and-paste from a test that was failing in a different package in the SameAsVisitor. - public void testGraphConfiguration() throws Exception { - URI a = vf.createURI(namespace, "a"); - Statement statement = new StatementImpl(a, vf.createURI(namespace, "p"), vf.createLiteral("l")); - Statement statement2 = new StatementImpl(a, vf.createURI(namespace, "p2"), vf.createLiteral("l")); - ryaDAO.add(RdfToRyaConversions.convertStatement(statement)); - ryaDAO.add(RdfToRyaConversions.convertStatement(statement2)); - ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l")))); - ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l")))); - - // build a connection - RdfCloudTripleStore store = new RdfCloudTripleStore(); - store.setConf(conf); - store.setRyaDAO(ryaDAO); - - InferenceEngine inferenceEngine = new InferenceEngine(); - inferenceEngine.setRyaDAO(ryaDAO); - store.setInferenceEngine(inferenceEngine); - - store.initialize(); - - System.out.println(Iterations.asList(store.getConnection().getStatements(a, vf.createURI(namespace, "p"), vf.createLiteral("l"), false, new Resource[0])).size()); - } -} diff --git a/sail/src/test/resources/cdrdf.xml b/sail/src/test/resources/cdrdf.xml deleted file mode 100644 index cd02ed20e..000000000 --- a/sail/src/test/resources/cdrdf.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - - - - Bob Dylan - USA - Columbia - 10.90 - 1985 - - - - Bonnie Tyler - UK - CBS Records - 9.90 - 1993 - - diff --git a/sail/src/test/resources/namedgraphs.trig b/sail/src/test/resources/namedgraphs.trig deleted file mode 100644 index 748d27697..000000000 --- a/sail/src/test/resources/namedgraphs.trig +++ /dev/null @@ -1,37 +0,0 @@ -@prefix rdf: . -@prefix xsd: . -@prefix swp: . -@prefix dc: . -@prefix ex: . -@prefix : . -:G1 { :Monica ex:name "Monica Murphy" . - :Monica ex:homepage . - :Monica ex:email . - :Monica ex:one . - :Monica ex:two . - :Monica ex:three . - :Monica ex:four . - :Monica ex:five . - :Monica ex:six . - :Monica ex:seven . - :Monica ex:eight . - :Monica ex:nine . - :Monica ex:ten . 
- :Monica ex:hasSkill ex:Management } - -:G2 { :Monica rdf:type ex:Person . - :Monica ex:hasSkill ex:Programming } - -:G4 { :Phobe ex:name "Phobe Buffet" } - -:G3 { :G1 swp:assertedBy _:w1 . - _:w1 swp:authority :Chris . - _:w1 dc:date "2003-10-02"^^xsd:date . - :G2 swp:quotedBy _:w2 . - :G4 swp:assertedBy _:w2 . - _:w2 dc:date "2003-09-03"^^xsd:date . - _:w2 swp:authority :Tom . - :Chris rdf:type ex:Person . - :Chris ex:email . - :Tom rdf:type ex:Person . - :Tom ex:email } \ No newline at end of file diff --git a/sail/src/test/resources/ntriples.nt b/sail/src/test/resources/ntriples.nt deleted file mode 100644 index edf11906a..000000000 --- a/sail/src/test/resources/ntriples.nt +++ /dev/null @@ -1 +0,0 @@ - . \ No newline at end of file diff --git a/sail/src/test/resources/reification.xml b/sail/src/test/resources/reification.xml deleted file mode 100644 index 5ab77225d..000000000 --- a/sail/src/test/resources/reification.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - - 1 - - 2011-01-07T21:29:45.545Z - - - diff --git a/sail/src/test/resources/univ-bench.owl b/sail/src/test/resources/univ-bench.owl deleted file mode 100644 index 691a330eb..000000000 --- a/sail/src/test/resources/univ-bench.owl +++ /dev/null @@ -1,466 +0,0 @@ - - - - - administrative staff worker - - - - - article - - - - - assistant professor - - - - - associate professor - - - - - book - - - - - chair - - - - - - - - - - - - - - clerical staff worker - - - - - school - - - - - conference paper - - - - - teaching course - - - - - dean - - - - - - - - - - - - - - university department - - - - - director - - - - - - - - - - - - - Employee - - - - - - - - - - - - - faculty member - - - - - full professor - - - - - Graduate Level Courses - - - - - graduate student - - - - - - - - - - - - - institute - - - - - journal article - - - - - lecturer - - - - - manual - - - - - organization - - - - person - - - - post doctorate - - - - - professor - - - - - program - - - - - publication - - - - research work - - - - - university research assistant - - - - - - - - - - - - - research group - - - - - schedule - - - - software program - - - - - published specification - - - - - student - - - - - - - - - - - - - systems staff worker - - - - - university teaching assistant - - - - - - - - - - - - - technical report - - - - - undergraduate student - - - - - university - - - - - unnoficial publication - - - - - visiting professor - - - - - Work - - - - is being advised by - - - - - - is affiliated with - - - - - - is affiliated with - - - - - - is age - - - - - has a degree from - - - - - - - has a doctoral degree from - - - - - - - can be reached at - - - - - has as an alumnus - - - - - - - is the head of - - - - - lists as a course - - - - - - has a masters degree from - - - - - - - has as a member - - - - - -member of - - - - -name - - - - office room No. 
- - - - publishes - - - - - - was written by - - - - - - was written on - - - - - is about - - - - - - is researching - - - - has as a research project - - - - - - is documented in - - - - - - is version - - - - - is part of - - - - - - is taking - - - - teaches - - - - - - is a teaching assistant for - - - - - - telephone number - - - - - is tenured: - - - - - title - - - - - has an undergraduate degree from - - - - - - - Works For - - - - - diff --git a/src/license/apacheV2Header.ftl b/src/license/apacheV2Header.ftl deleted file mode 100644 index 60b675e31..000000000 --- a/src/license/apacheV2Header.ftl +++ /dev/null @@ -1,16 +0,0 @@ -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. diff --git a/temp.txt b/temp.txt deleted file mode 100644 index e69de29bb..000000000 diff --git a/web/pom.xml b/web/pom.xml deleted file mode 100644 index bcc7f7a1f..000000000 --- a/web/pom.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - 4.0.0 - - org.apache.rya - rya-project - 3.2.10-SNAPSHOT - - - rya.web - Apache Rya Web Projects - - pom - - - web.rya - - diff --git a/web/web.rya/pom.xml b/web/web.rya/pom.xml deleted file mode 100644 index 204bbefb3..000000000 --- a/web/web.rya/pom.xml +++ /dev/null @@ -1,136 +0,0 @@ - - - - - 4.0.0 - - org.apache.rya - rya.web - 3.2.10-SNAPSHOT - - - web.rya - Apache Rya Web Implementation - - war - - - - org.apache.rya - rya.api - - - org.apache.rya - rya.sail - - - org.apache.rya - accumulo.rya - - - org.apache.rya - rya.prospector - - - org.apache.rya - rya.indexing - - - - org.openrdf.sesame - sesame-rio-rdfxml - - - org.openrdf.sesame - sesame-queryresultio-sparqljson - - - - org.springframework.data - spring-data-hadoop - - - - org.springframework - spring-context - - - org.springframework - spring-core - - - org.springframework - spring-web - - - org.springframework - spring-webmvc - - - org.springframework - spring-beans - - - org.springframework - spring-test - - - - org.hamcrest - hamcrest-all - - - - org.slf4j - slf4j-log4j12 - - - commons-pool - commons-pool - - - - junit - junit - test - - - org.mockito - mockito-all - test - - - - web.rya - - - org.mortbay.jetty - maven-jetty-plugin - 6.1.26 - - - /web.rya - resources - - - - - - diff --git a/web/web.rya/resources/environment.properties b/web/web.rya/resources/environment.properties deleted file mode 100644 index 7848a4e88..000000000 --- a/web/web.rya/resources/environment.properties +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -instance.name=cloudbase -instance.zk=localhost:2181 -instance.username=root -instance.password=secret -rya.tableprefix=triplestore_ -rya.displayqueryplan=true -mongo.db.collectionprefix=rya_ -mongo.db.instance=localhost -mongo.db.name=rya -mongo.db.port=21017 diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java deleted file mode 100644 index 313a3c32f..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -//import cloudbase.core.client.Connector; -//import cloudbase.core.client.ZooKeeperInstance; -//import mvm.rya.cloudbase.CloudbaseRdfDAO; -//import mvm.rya.cloudbase.CloudbaseRdfEvalStatsDAO; -//import RdfCloudTripleStore; -//import org.openrdf.repository.Repository; -//import org.openrdf.repository.RepositoryException; -//import org.openrdf.repository.sail.SailRepository; -// -//import javax.servlet.ServletConfig; -//import javax.servlet.ServletException; -//import javax.servlet.http.HttpServlet; -// -///** -// * Class AbstractRDFWebServlet -// * Date: Dec 13, 2010 -// * Time: 9:44:08 AM -// */ -//public class AbstractRDFWebServlet extends HttpServlet implements RDFWebConstants { -// -// protected Repository repository; -// protected String origTablePrefix; -// -// @Override -// public void init(ServletConfig config) throws ServletException { -// super.init(config); -// try { -// String instance = config.getInitParameter(INSTANCE_PARAM); -// String server = config.getInitParameter(SERVER_PARAM); -// String port = config.getInitParameter(PORT_PARAM); -// String user = config.getInitParameter(USER_PARAM); -// String password = config.getInitParameter(PASSWORD_PARAM); -// String tablePrefix = config.getInitParameter(TABLEPREFIX_PARAM); -// -// RdfCloudTripleStore rts = new RdfCloudTripleStore(); -//// rts.setInstance("dne"); -//// if (instance != null) -//// rts.setInstance(instance); -//// if (server != null) -//// rts.setServer(server); -//// if (port != null) -//// rts.setPort(Integer.parseInt(port)); -//// if (user != null) -//// rts.setUser(user); -//// if (password != null) -//// rts.setPassword(password); -//// if (tablePrefix != null) { -//// rts.setTablePrefix(tablePrefix); -//// origTablePrefix = tablePrefix; -//// } -// CloudbaseRdfDAO crdfdao = new CloudbaseRdfDAO(); -// Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password"); -// crdfdao.setConnector(connector); -// crdfdao.setSpoTable("lubm_spo"); -// crdfdao.setPoTable("lubm_po"); -// crdfdao.setOspTable("lubm_osp"); -// crdfdao.setNamespaceTable("lubm_ns"); -// rts.setRdfDao(crdfdao); -// CloudbaseRdfEvalStatsDAO ceval = new CloudbaseRdfEvalStatsDAO(); -// ceval.setConnector(connector); -// ceval.setEvalTable("lubm_eval"); -// rts.setRdfEvalStatsDAO(ceval); -// -// repository = new SailRepository(rts); -// -// repository.initialize(); -// } catch (Exception e) { -// throw new ServletException(e); -// } -// } -// -// @Override -// public void destroy() { -// try { -// repository.shutDown(); -// } catch (RepositoryException e) { -// e.printStackTrace(); -// } -// } -// -// -// public Repository getRepository() { -// return repository; -// } -// -// public void setRepository(Repository repository) { -// this.repository = repository; -// } -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java deleted file mode 100644 index 661fe38cb..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -//import org.openrdf.query.QueryLanguage; -//import org.openrdf.query.TupleQuery; -//import org.openrdf.query.resultio.TupleQueryResultWriter; -//import org.openrdf.repository.RepositoryConnection; -//import org.openrdf.repository.RepositoryException; -// -//import javax.servlet.ServletException; -//import javax.servlet.http.HttpServletRequest; -//import javax.servlet.http.HttpServletResponse; -//import java.io.IOException; -// -//public class DeleteDataServlet extends AbstractRDFWebServlet { -// -// @Override -// protected void doGet(HttpServletRequest req, HttpServletResponse resp) -// throws ServletException, IOException { -// if (req == null || req.getInputStream() == null) -// return; -// -// String query_s = req.getParameter("query"); -// -// RepositoryConnection conn = null; -// try { -// conn = repository.getConnection(); -// // query data -// TupleQuery tupleQuery = conn.prepareTupleQuery( -// QueryLanguage.SPARQL, query_s); -// TupleQueryResultWriter deleter = new mvm.mmrts.rdftriplestore.QueryResultsDeleter(conn); -// tupleQuery.evaluate(deleter); -// -// } catch (Exception e) { -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java deleted file mode 100644 index 175ef2a5c..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -//import org.openrdf.model.Resource; -//import org.openrdf.repository.RepositoryConnection; -//import org.openrdf.repository.RepositoryException; -//import org.openrdf.rio.RDFFormat; -//import org.openrdf.rio.RDFParseException; -// -//import javax.servlet.ServletException; -//import javax.servlet.ServletInputStream; -//import javax.servlet.http.HttpServletRequest; -//import javax.servlet.http.HttpServletResponse; -//import java.io.IOException; -// -//public class LoadDataServlet extends AbstractRDFWebServlet { -// -// @Override -// protected void doPost(HttpServletRequest req, HttpServletResponse resp) -// throws ServletException, IOException { -// if (req == null || req.getInputStream() == null) -// return; -// -// String format_s = req.getParameter("format"); -// RDFFormat format = RDFFormat.RDFXML; -// if (format_s != null) { -// format = RDFFormat.valueOf(format_s); -// if (format == null) -// throw new ServletException("RDFFormat[" + format_s + "] not found"); -// } -// ServletInputStream stream = req.getInputStream(); -// -// RepositoryConnection conn = null; -// try { -// conn = repository.getConnection(); -// -// // generate data -// conn.add(stream, "", format, new Resource[]{}); -// conn.commit(); -// -// conn.close(); -// } catch (RepositoryException e) { -// throw new ServletException(e); -// } catch (RDFParseException e) { -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java deleted file mode 100644 index dfcd0354c..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -//import RdfCloudTripleStoreConstants; -//import RdfCloudTripleStoreConstants; -//import org.openrdf.model.ValueFactory; -//import org.openrdf.model.impl.ValueFactoryImpl; -//import org.openrdf.query.GraphQuery; -//import org.openrdf.query.QueryLanguage; -//import org.openrdf.query.TupleQuery; -//import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -//import org.openrdf.repository.Repository; -//import org.openrdf.repository.RepositoryConnection; -//import org.openrdf.repository.RepositoryException; -//import org.openrdf.rio.rdfxml.RDFXMLWriter; -// -//import javax.servlet.ServletException; -//import javax.servlet.ServletOutputStream; -//import javax.servlet.http.HttpServletRequest; -//import javax.servlet.http.HttpServletResponse; -//import java.io.IOException; -//import java.io.PrintStream; -// -//public class QueryDataServlet extends AbstractRDFWebServlet { -// -// private ValueFactory vf = new ValueFactoryImpl(); -// -// @Override -// protected void doGet(HttpServletRequest req, HttpServletResponse resp) -// throws ServletException, IOException { -// if (req == null || req.getInputStream() == null) -// return; -// -// String query = req.getParameter("query"); -// String ttl = req.getParameter("ttl"); -// String startTime = req.getParameter("startTime"); -// String infer = req.getParameter("infer"); -// String performant = req.getParameter("performant"); -// String useStats = req.getParameter("useStats"); -// String timeUris = req.getParameter("timeUris"); -// String tablePrefix = req.getParameter("tablePrefix"); -// -// //validate infer, performant -// if (infer != null) { -// Boolean.parseBoolean(infer); -// } else if (performant != null) { -// Boolean.parseBoolean(performant); -// } -// -// if (query == null) { -// throw new ServletException("Please set a query"); -// } -// if (query.toLowerCase().contains("select")) { -// try { -// performSelect(query, ttl, startTime, infer, performant, useStats, timeUris, tablePrefix, resp); -// } catch (Exception e) { -// throw new ServletException(e); -// } -// } else if (query.toLowerCase().contains("construct")) { -// try { -// performConstruct(query, ttl, startTime, infer, performant, useStats, timeUris, tablePrefix, resp); -// } catch (Exception e) { -// throw new ServletException(e); -// } -// } else { -// throw new ServletException("Invalid SPARQL query: " + query); -// } -// -// } -// -// private void performConstruct(String query, String ttl, String startTime, String infer, String performant, -// String useStats, String timeUris, String tablePrefix, HttpServletResponse resp) -// throws Exception { -// RepositoryConnection conn = null; -// try { -// ServletOutputStream os = resp.getOutputStream(); -// conn = repository.getConnection(); -// -// // query data -// GraphQuery graphQuery = conn.prepareGraphQuery( -// QueryLanguage.SPARQL, query); -// if (ttl != null && ttl.length() > 0) -// graphQuery.setBinding("ttl", vf.createLiteral(Long.parseLong(ttl))); -// if (startTime != null && startTime.length() > 0) -// graphQuery.setBinding("startTime", vf.createLiteral(Long.parseLong(startTime))); -// if (performant != null && performant.length() > 0) -// graphQuery.setBinding("performant", vf.createLiteral(Boolean.parseBoolean(performant))); -// if (infer != null && infer.length() > 0) -// graphQuery.setBinding("infer", vf.createLiteral(Boolean.parseBoolean(infer))); -// if (useStats != null && useStats.length() > 0) -// graphQuery.setBinding("useStats", 
vf.createLiteral(Boolean.parseBoolean(useStats))); -// if (timeUris != null && timeUris.length() > 0) -// graphQuery.setBinding("timeUris", vf.createURI(timeUris)); -// if (tablePrefix != null && tablePrefix.length() > 0) -// RdfCloudTripleStoreConstants.prefixTables(tablePrefix); -// RDFXMLWriter rdfWriter = new RDFXMLWriter(os); -// graphQuery.evaluate(rdfWriter); -// -// } catch (Exception e) { -// resp.setStatus(500); -// e.printStackTrace(new PrintStream(resp.getOutputStream())); -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// RdfCloudTripleStoreConstants.prefixTables(origTablePrefix); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -// private void performSelect(String query, String ttl, String startTime, String infer, String performant, -// String useStats, String timeUris, String tablePrefix, HttpServletResponse resp) -// throws Exception { -// RepositoryConnection conn = null; -// try { -// ServletOutputStream os = resp.getOutputStream(); -// conn = repository.getConnection(); -// -// // query data -// TupleQuery tupleQuery = conn.prepareTupleQuery( -// QueryLanguage.SPARQL, query); -// if (ttl != null && ttl.length() > 0) -// tupleQuery.setBinding("ttl", vf.createLiteral(Long.parseLong(ttl))); -// if (startTime != null && startTime.length() > 0) -// tupleQuery.setBinding("startTime", vf.createLiteral(Long.parseLong(startTime))); -// if (performant != null && performant.length() > 0) -// tupleQuery.setBinding("performant", vf.createLiteral(Boolean.parseBoolean(performant))); -// if (infer != null && infer.length() > 0) -// tupleQuery.setBinding("infer", vf.createLiteral(Boolean.parseBoolean(infer))); -// if (useStats != null && useStats.length() > 0) -// tupleQuery.setBinding("useStats", vf.createLiteral(Boolean.parseBoolean(useStats))); -// if (timeUris != null && timeUris.length() > 0) -// tupleQuery.setBinding("timeUris", vf.createURI(timeUris)); -// if (tablePrefix != null && tablePrefix.length() > 0) -// RdfCloudTripleStoreConstants.prefixTables(tablePrefix); -// SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(os); -// tupleQuery.evaluate(sparqlWriter); -// -// } catch (Exception e) { -// resp.setStatus(500); -// e.printStackTrace(new PrintStream(resp.getOutputStream())); -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// RdfCloudTripleStoreConstants.prefixTables(origTablePrefix); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -// public Repository getRepository() { -// return repository; -// } -// -// public void setRepository(Repository repository) { -// this.repository = repository; -// } -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java deleted file mode 100644 index b1eb5e3f4..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -//import org.openrdf.query.GraphQuery; -//import org.openrdf.query.QueryLanguage; -//import org.openrdf.query.TupleQuery; -//import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -//import org.openrdf.repository.Repository; -//import org.openrdf.repository.RepositoryConnection; -//import org.openrdf.repository.RepositoryException; -//import org.openrdf.rio.rdfxml.RDFXMLWriter; -// -//import javax.servlet.ServletException; -//import javax.servlet.ServletOutputStream; -//import javax.servlet.http.HttpServletRequest; -//import javax.servlet.http.HttpServletResponse; -//import java.io.IOException; -//import java.io.PrintStream; -// -//public class QuerySerqlDataServlet extends AbstractRDFWebServlet { -// -// @Override -// protected void doGet(HttpServletRequest req, HttpServletResponse resp) -// throws ServletException, IOException { -// if (req == null || req.getInputStream() == null) -// return; -// -// String query = req.getParameter("query"); -// -// if (query == null) { -// throw new ServletException("Please set a query"); -// } -// -// if (query.toLowerCase().contains("select")) { -// try { -// performSelect(query, resp); -// } catch (Exception e) { -// throw new ServletException(e); -// } -// } else if (query.toLowerCase().contains("construct")) { -// try { -// performConstruct(query, resp); -// } catch (Exception e) { -// throw new ServletException(e); -// } -// } else { -// throw new ServletException("Invalid SERQL query: " + query); -// } -// -// } -// -// private void performConstruct(String query, HttpServletResponse resp) -// throws Exception { -// RepositoryConnection conn = null; -// try { -// ServletOutputStream os = resp.getOutputStream(); -// conn = repository.getConnection(); -// -// // query data -// GraphQuery graphQuery = conn.prepareGraphQuery( -// QueryLanguage.SERQL, query); -// RDFXMLWriter rdfWriter = new RDFXMLWriter(os); -// graphQuery.evaluate(rdfWriter); -// -// conn.close(); -// } catch (Exception e) { -// resp.setStatus(500); -// e.printStackTrace(new PrintStream(resp.getOutputStream())); -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -// private void performSelect(String query, HttpServletResponse resp) -// throws Exception { -// RepositoryConnection conn = null; -// try { -// ServletOutputStream os = resp.getOutputStream(); -// conn = repository.getConnection(); -// -// // query data -// TupleQuery tupleQuery = conn.prepareTupleQuery( -// QueryLanguage.SERQL, query); -// SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(os); -// tupleQuery.evaluate(sparqlWriter); -// -// conn.close(); -// } catch (Exception e) { -// resp.setStatus(500); -// e.printStackTrace(new PrintStream(resp.getOutputStream())); -// throw new ServletException(e); -// } finally { -// if (conn != null) { -// try { -// conn.close(); -// } catch (RepositoryException e) { -// -// } -// } -// } -// } -// -// public Repository getRepository() { -// return repository; 
-// } -// -// public void setRepository(Repository repository) { -// this.repository = repository; -// } -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java deleted file mode 100644 index 16cfe718a..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -//package mvm.cloud.rdf.web.cloudbase.sail; - -// -///** -// * Interface RDFWebConstants -// * Date: Dec 13, 2010 -// * Time: 9:39:45 AM -// */ -//public interface RDFWebConstants { -// public static final String INSTANCE_PARAM = "rts.instance"; -// public static final String SERVER_PARAM = "rts.server"; -// public static final String PORT_PARAM = "rts.port"; -// public static final String USER_PARAM = "rts.user"; -// public static final String PASSWORD_PARAM = "rts.password"; -// public static final String TABLEPREFIX_PARAM = "rts.tableprefix"; -//} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java deleted file mode 100644 index bc6272a1d..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java +++ /dev/null @@ -1,344 +0,0 @@ -package mvm.cloud.rdf.web.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import static mvm.rya.api.RdfCloudTripleStoreConstants.AUTH_NAMESPACE; -import static mvm.rya.api.RdfCloudTripleStoreConstants.VALUE_FACTORY; - -import java.io.IOException; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.List; -import java.util.Timer; -import java.util.TimerTask; - -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import mvm.rya.api.security.SecurityProvider; -import mvm.rya.api.RdfCloudTripleStoreConfiguration; - -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.query.BindingSet; -import org.openrdf.query.GraphQuery; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.QueryResultHandlerException; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResultHandler; -import org.openrdf.query.TupleQueryResultHandlerException; -import org.openrdf.query.Update; -import org.openrdf.query.UpdateExecutionException; -import org.openrdf.query.parser.ParsedGraphQuery; -import org.openrdf.query.parser.ParsedOperation; -import org.openrdf.query.parser.ParsedTupleQuery; -import org.openrdf.query.parser.ParsedUpdate; -import org.openrdf.query.parser.QueryParserUtil; -import org.openrdf.query.resultio.sparqljson.SPARQLResultsJSONWriter; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.rdfxml.RDFXMLWriter; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Controller; -import org.springframework.util.StringUtils; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; - -/** - * Class RdfController - * Date: Mar 7, 2012 - * Time: 11:07:19 AM - */ -@Controller -public class RdfController { - - private static final int QUERY_TIME_OUT_SECONDS = 120; - - @Autowired - Repository repository; - - @Autowired - SecurityProvider provider; - - @RequestMapping(value = "/queryrdf", method = {RequestMethod.GET, RequestMethod.POST}) - public void queryRdf(@RequestParam("query") String query, - @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, required = false) String auth, - @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_INFER, required = false) String infer, - @RequestParam(value = "nullout", required = false) String nullout, - @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, required = false) String emit, - @RequestParam(value = "padding", required = false) String padding, - @RequestParam(value = "callback", required = false) String callback, - HttpServletRequest request, - HttpServletResponse response) { - RepositoryConnection conn = null; - final Thread queryThread = Thread.currentThread(); - auth = StringUtils.arrayToCommaDelimitedString(provider.getUserAuths(request)); - Timer timer = new Timer(); - timer.schedule(new TimerTask() { - - @Override 
- public void run() { - System.out.println("interrupting"); - queryThread.interrupt(); - - } - }, QUERY_TIME_OUT_SECONDS * 1000); - - try { - ServletOutputStream os = response.getOutputStream(); - conn = repository.getConnection(); - - Boolean isBlankQuery = StringUtils.isEmpty(query); - ParsedOperation operation = QueryParserUtil.parseOperation(QueryLanguage.SPARQL, query, null); - - Boolean requestedCallback = !StringUtils.isEmpty(callback); - Boolean requestedFormat = !StringUtils.isEmpty(emit); - - if (requestedCallback) { - os.print(callback + "("); - } - - if (!isBlankQuery) { - if (operation instanceof ParsedGraphQuery) { - // Perform Tupple Query - RDFHandler handler = new RDFXMLWriter(os); - response.setContentType("text/xml"); - performGraphQuery(query, conn, auth, infer, nullout, handler); - } else if (operation instanceof ParsedTupleQuery) { - // Perform Tupple Query - TupleQueryResultHandler handler; - - if (requestedFormat && emit.equalsIgnoreCase("json")) { - handler = new SPARQLResultsJSONWriter(os); - response.setContentType("application/json"); - } else { - handler = new SPARQLResultsXMLWriter(os); - response.setContentType("text/xml"); - } - - performQuery(query, conn, auth, infer, nullout, handler); - } else if (operation instanceof ParsedUpdate) { - // Perform Update Query - performUpdate(query, conn, os, auth, infer); - } else { - throw new MalformedQueryException("Cannot process query. Query type not supported."); - } - } - - if (requestedCallback) { - os.print(")"); - } - } catch (Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } finally { - if (conn != null) { - try { - conn.close(); - } catch (RepositoryException e) { - e.printStackTrace(); - } - } - } - - timer.cancel(); - } - - private void performQuery(String query, RepositoryConnection conn, String auth, String infer, String nullout, TupleQueryResultHandler handler) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException { - TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); - if (auth != null && auth.length() > 0) - tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, VALUE_FACTORY.createLiteral(auth)); - if (infer != null && infer.length() > 0) - tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer))); - if (nullout != null && nullout.length() > 0) { - //output nothing, but still run query - tupleQuery.evaluate(new TupleQueryResultHandler() { - @Override - public void startQueryResult(List strings) throws TupleQueryResultHandlerException { - } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - } - - @Override - public void handleSolution(BindingSet bindings) throws TupleQueryResultHandlerException { - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - } - }); - } else { - CountingTupleQueryResultHandlerWrapper sparqlWriter = new CountingTupleQueryResultHandlerWrapper(handler); - long startTime = System.currentTimeMillis(); - tupleQuery.evaluate(sparqlWriter); - System.out.format("Query Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.); - System.out.format("Result Count = %s\n", sparqlWriter.getCount()); - } - - } - - private void performGraphQuery(String query, RepositoryConnection conn, String auth, String infer, String 
nullout, RDFHandler handler) throws RepositoryException, MalformedQueryException, QueryEvaluationException, RDFHandlerException { - GraphQuery graphQuery = conn.prepareGraphQuery(QueryLanguage.SPARQL, query); - if (auth != null && auth.length() > 0) - graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, VALUE_FACTORY.createLiteral(auth)); - if (infer != null && infer.length() > 0) - graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer))); - if (nullout != null && nullout.length() > 0) { - //output nothing, but still run query - // TODO this seems like a strange use case. - graphQuery.evaluate(new RDFHandler() { - @Override - public void startRDF() throws RDFHandlerException { - } - - @Override - public void endRDF() throws RDFHandlerException { - } - - @Override - public void handleNamespace(String prefix, String uri) - throws RDFHandlerException { - } - - @Override - public void handleStatement(Statement st) - throws RDFHandlerException { - } - - @Override - public void handleComment(String comment) - throws RDFHandlerException { - } - }); - } else { - long startTime = System.currentTimeMillis(); - graphQuery.evaluate(handler); - System.out.format("Query Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.); - } - - } - private void performUpdate(String query, RepositoryConnection conn, ServletOutputStream os, String auth, String infer) throws RepositoryException, MalformedQueryException, IOException { - Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query); - if (auth != null && auth.length() > 0) - update.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, VALUE_FACTORY.createLiteral(auth)); - if (infer != null && infer.length() > 0) - update.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer))); - - long startTime = System.currentTimeMillis(); - - try { - update.execute(); - } catch (UpdateExecutionException e) { - os.print(String.format("Update could not be successfully completed for query: %s\n\n", query)); - os.print(String.format("\n\n%s", e.getLocalizedMessage())); - } - - System.out.format("Update Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.); - } - - private static final class CountingTupleQueryResultHandlerWrapper implements TupleQueryResultHandler { - private TupleQueryResultHandler indir; - private int count = 0; - - public CountingTupleQueryResultHandlerWrapper(TupleQueryResultHandler indir){ - this.indir = indir; - } - - public int getCount() { return count; } - - @Override - public void endQueryResult() throws TupleQueryResultHandlerException { - indir.endQueryResult(); - } - - @Override - public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { - count++; - indir.handleSolution(bindingSet); - } - @Override - public void startQueryResult(List bindingNames) throws TupleQueryResultHandlerException { - count = 0; - indir.startQueryResult(bindingNames); - } - - @Override - public void handleBoolean(boolean arg0) throws QueryResultHandlerException { - } - - @Override - public void handleLinks(List arg0) throws QueryResultHandlerException { - } - } - - @RequestMapping(value = "/loadrdf", method = RequestMethod.POST) - public void loadRdf(@RequestParam(required = false) String format, - @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_CV, required = false) String cv, - @RequestParam(required = false) String graph, - @RequestBody String 
body, - HttpServletResponse response) - throws RepositoryException, IOException, RDFParseException { - List authList = new ArrayList(); - RDFFormat format_r = RDFFormat.RDFXML; - if (format != null) { - format_r = RDFFormat.valueOf(format); - if (format_r == null) - throw new RuntimeException("RDFFormat[" + format + "] not found"); - } - if (graph != null) { - authList.add(VALUE_FACTORY.createURI(graph)); - } - RepositoryConnection conn = null; - try { - conn = repository.getConnection(); - if (cv != null && cv.length() > 0) { - String[] auths = cv.split("\\|"); - for (String auth : auths) { - authList.add(VALUE_FACTORY.createURI(AUTH_NAMESPACE, auth)); - } - } - conn.add(new StringReader(body), "", format_r, authList.toArray(new Resource[authList.size()])); - conn.commit(); - } finally { - if (conn != null) { - conn.close(); - } - } - } -} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java deleted file mode 100644 index 7e763d579..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java +++ /dev/null @@ -1,27 +0,0 @@ -package mvm.cloud.rdf.web.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -public enum ResultFormat { - XML, JSON, JSONP - -} diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java deleted file mode 100644 index 58fc0f482..000000000 --- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java +++ /dev/null @@ -1,34 +0,0 @@ -package mvm.cloud.rdf.web.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - -import javax.servlet.http.HttpServletRequest; - -import mvm.rya.api.security.SecurityProvider; - -public class SecurityProviderImpl implements SecurityProvider{ - - public String[] getUserAuths(HttpServletRequest incRequest) { - String[] auths = incRequest.getParameterValues("query.auth"); - return auths; - } - -} diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml deleted file mode 100644 index 7f9caaf16..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml deleted file mode 100644 index 8d5ee6954..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml deleted file mode 100644 index 85ea26e27..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml deleted file mode 100644 index 67feae8b8..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml +++ /dev/null @@ -1,105 +0,0 @@ - - - - - - - - - - - - - - - - - sc.cloudbase.instancename=${instance.name} - sc.cloudbase.zookeepers=${instance.zk} - sc.cloudbase.username=${instance.username} - sc.cloudbase.password=${instance.password} - - query.printqueryplan=${rya.displayqueryplan} - - sc.freetext.doctable=${sc.freetext.doctable} - sc.freetext.termtable=${sc.freetext.termtable} - sc.geo.table=${sc.geo.table} - sc.geo.predicates=${sc.geo.predicates} - sc.geo.numPartitions=${sc.geo.numPartitions} - sc.temporal.index=${sc.temporal.index} - - query.usestats=false - query.useselectivity=false - query.usecompositecard=false - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml deleted file mode 100644 index b42a22274..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml deleted file mode 100644 index c15e9d312..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - diff --git a/web/web.rya/src/main/webapp/WEB-INF/web.xml b/web/web.rya/src/main/webapp/WEB-INF/web.xml deleted file mode 100644 index 5f53e4b82..000000000 --- a/web/web.rya/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,166 +0,0 @@ - - - - - - - RDF Cloud Triple Store Web Access - - springrdf - org.springframework.web.servlet.DispatcherServlet - - contextConfigLocation - - /WEB-INF/spring/spring-root.xml - - - 1 - - - - springrdf - / - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- [remaining web.xml servlet/filter declarations stripped during extraction]
diff --git a/web/web.rya/src/main/webapp/crossdomain.xml b/web/web.rya/src/main/webapp/crossdomain.xml
deleted file mode 100644
index cec91f6f6..000000000
--- a/web/web.rya/src/main/webapp/crossdomain.xml
+++ /dev/null
@@ -1,25 +0,0 @@
- [crossdomain.xml policy markup stripped during extraction]
diff --git a/web/web.rya/src/main/webapp/sparqlQuery.jsp b/web/web.rya/src/main/webapp/sparqlQuery.jsp
deleted file mode 100644
index d026a509c..000000000
--- a/web/web.rya/src/main/webapp/sparqlQuery.jsp
+++ /dev/null
@@ -1,79 +0,0 @@
-<%@ page contentType="text/html; charset=iso-8859-1" language="java" %>
-<%@ page import="java.net.*" %>
-<%
-    String sparql=request.getParameter("sparql");
-    String infer=request.getParameter("infer");
-    String auth=request.getParameter("auth");
-    String resultFormat = request.getParameter("emit");
-    String padding = request.getParameter("padding");
-
-    if(sparql != null){
-        String sparqlEnc = URLEncoder.encode(sparql,"UTF-8");
-        String urlTo = "queryrdf?query.infer="+infer+"&query.auth="+auth+"&query.resultformat="+resultFormat+"&padding="+padding+"&query="+sparqlEnc;
-        response.sendRedirect(urlTo);
-    }
-%>
- [HTML form markup stripped during extraction; recoverable field labels: SPARQL Query, Inferencing?(true/false), Authorization, Result Format, JSONP Padding]
- - - diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java deleted file mode 100644 index 061bda9f1..000000000 --- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java +++ /dev/null @@ -1,476 +0,0 @@ -package mvm.cloud.rdf.web.cloudbase.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.net.URLEncoder; - -public class DeleteDataServletRun { - - public static void main(String[] args) { - try { -// String query = "SELECT ?artist WHERE { ?abt \"1988\"." + -// " }"; - String artistQuery = "SELECT ?artist WHERE { " - + " ?abt ?artist . " - + " ?abt \"1993\" . " - + "}"; -// String query = "SELECT ?pred ?obj WHERE { ?pred ?obj }"; -// String query = "SELECT ?pred ?label ?obj WHERE { ?pred ?obj ." + -// " ?obj ?label }"; - long dayBefore = System.currentTimeMillis() - 86400000; - System.out.println(dayBefore); -// String query = "SELECT DISTINCT ?obj WHERE { ?serv . " + -// " ?serv ?obj ." + -// " ?serv ?ts ." + -//// " FILTER (?ts >= '"+dayBefore+"') " + -// " }" + -// " ORDER BY ?obj "; - - String giveAllClusters = "SELECT DISTINCT ?uu WHERE { ?uu ?obj . " + - " }" + - " ORDER BY ?uu "; - -// String query = "SELECT DISTINCT ?obj WHERE { ?obj . " + -// " }" + -// " ORDER BY ?obj "; - - //hasfunction query - String hasFunctionQuery = "SELECT DISTINCT ?obj WHERE { ?uu . " + - " ?uu ?obj" + - " }" + - " ORDER BY ?obj "; - - String allFunctions = "SELECT DISTINCT ?func ?obj WHERE { ?uu ?func . " + - " ?uu ?obj" + - " }" + - " ORDER BY ?func "; - - String allFunctionsThresh = "SELECT DISTINCT ?func ?obj ?thresh WHERE { ?uu ?func . " + - " ?uu ?obj ." + - " ?uu ?thresh" + - " }" + - " ORDER BY ?func "; - - - String cwdQuery = "SELECT DISTINCT ?obj ?packname WHERE { ?subj ?obj . " + - " ?subj ?instPacks ." + - " ?instPacks ?packid ." 
+ - " ?packid ?packname } "; - - String cwdAllServersQuery = "SELECT DISTINCT ?obj WHERE { ?subj ?obj } "; - - // rearrange for better filter - // 0.124s - String lubm1 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x ub:takesCourse .\n" + -// " ?x rdf:type ub:GraduateStudent .\n" + - " }"; - - // 142s - // not sure why it is so long will have to do some more tests - String lubm2 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?z ub:subOrganizationOf ?y .\n" + - " ?y rdf:type ub:University .\n" + - " ?z rdf:type ub:Department .\n" + - " ?x ub:memberOf ?z .\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " ?x ub:undergraduateDegreeFrom ?y .\n" + - " }"; - - String lubm2_a = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " ?x ub:memberOf ?z .\n" + - " ?z ub:subOrganizationOf ?y .\n" + - " ?z rdf:type ub:Department .\n" + - " ?y rdf:type ub:University .\n" + -// " ?x ub:undergraduateDegreeFrom ?y .\n" + - " }"; - - // 0.127s - // Rearranged to put the assistant professor first, better filtering - String lubm3 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x ub:publicationAuthor .\n" + - " ?x rdf:type ub:Publication .\n" + - " }"; - -// had to infer relationships myself -// 0.671s - String lubm4 = "PREFIX rdf: \n" + - " PREFIX rdfs: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y ub:worksFor .\n" + - " ?x rdfs:subClassOf ub:Professor .\n" + - " ?y rdf:type ?x .\n" + - " ?y ub:name ?y1 .\n" + - " ?y ub:emailAddress ?y2 .\n" + - " ?y ub:telephone ?y3 .\n" + - " }"; - - //lubm5, we cannot do inferring for more than one level now. Person is too difficult - - //lubm6, we cannot do the implicit inference between Student and GraduateStudent - - //lubm14 - //0.1s - String lubm14 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type ub:UndergraduateStudent .\n" + - " }"; - - String bongoAllCollections = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type bg:Collection .\n" + - " ?x bg:uniqueid ?uid .\n" + - " ?x bg:title ?title .\n" + - " ?x bg:hasAuthor ?author .\n" + - " ?x bg:marking ?marking .\n" + - " }"; - - String bongoEntriesForCategory = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:hasCategory ?category .\n" + - " FILTER (?category = \"cat1\") \n" + - " }"; - - String bongoEntriesForAuthor = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:hasAuthor ?author .\n" + - " FILTER (?author = \"andrew2\") \n" + - " }"; - - String bongoEntriesForModifiedTime = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?entryid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:modifiedTime ?modifiedTime .\n" + - " FILTER (xsd:dateTime(?modifiedTime) >= \"2011-10-21T13:18:30\"^^xsd:dateTime) \n" + - " }"; - String bongoEntriesSortTitle = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?uniqueid WHERE\n" + - " {\n" + - " 
?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:title ?title .\n" + - " } ORDER BY ?title"; - - String bongoEntriesForTitle = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:title ?title .\n" + - " FILTER (regex(?title,\"Entry1Title\")) }"; - - String bongoQuery = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?col rdf:type bg:Collection .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2\\/S\\/P\\/Stock\\/Google_simple\\/6 bg:uniqueid ?uniqueid} .\n" + -// " OPTIONAL{ bg:'latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6' bg:title ?title} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:name ?name} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:marking ?marking} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:hasAuthor ?author} .\n" + - " }"; - - String bongoAllEntriesInCollection = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y bg:inCollection bg:CollA .\n" + - " ?y rdf:type bg:Entry .\n" + - " ?y bg:uniqueid ?uid .\n" + - " ?y bg:title ?title .\n" + - " ?y bg:etag ?etag .\n" + - " }"; - - String bongoAllForEntry1 = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " bg:EntryM rdf:type bg:Entry .\n" + -// " bg:EntryN bg:inCollection bg:CollectionN .\n" + - " bg:EntryM bg:mimeType ?mimeType .\n" + - " bg:EntryM bg:etag ?etag .\n" + - " OPTIONAL { bg:EntryM bg:slug ?slug}.\n" + - " bg:EntryM bg:uniqueid ?uniqueid .\n" + -// " bg:EntryN bg:title ?title .\n" + -// " bg:EntryN bg:marking ?marking .\n" + -// " bg:EntryN bg:mediaMarking ?mediaMarking .\n" + -// " bg:EntryN bg:editedTime ?editedTime .\n" + -// " bg:EntryN bg:modifiedTime ?modifiedTime .\n" + -// " bg:EntryN bg:publishedTime ?publishedTime .\n" + -// " bg:EntryN bg:mediaStorageId ?mediaStorageId .\n" + -// " bg:EntryN bg:mediaModifiedTime ?mediaModifiedTime .\n" + -// " bg:EntryN bg:entryStorageId ?entryStorageId .\n" + - " }"; - - String bongoEntryAllAuthors = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " bg:Entry1 bg:hasAuthor ?y .\n" + - " }"; - - String bongoEntriesModAfter = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x bg:editedTime ?edTime .\n" + - " FILTER (xsd:dateTime(?edTime) >= \"2010-01-01T00:00:00\"^^xsd:dateTime)\n" + - " }"; - - String cimData = "PREFIX rdf: \n" + - " PREFIX mm: \n" + - " PREFIX xsd: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type mm:ComputerSystem .\n" + - " ?x mm:hasRunningOS ?y .\n" + - " ?y mm:name ?z .\n" + - " }"; - - String cimData2 = "PREFIX rdf: \n" + - " PREFIX mm: \n" + - " PREFIX mmcs: \n" + - " SELECT ?pred ?obj WHERE {\n" + - " mmcs:computersystem ?pred ?obj\n" + - " }"; - - String cimData3 = "PREFIX rdf: \n" + - "SELECT ?pred ?obj WHERE {\n" + - " ?pred ?obj\n" + - "}"; - - String cimHasInstalledSoftware = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT DISTINCT ?obj ?name ?caption WHERE {\n" + -// " mm:hasInstalledSoftware ?obj .\n" + - " ?serv mm:hasInstalledSoftware ?obj .\n" + - " ?obj mm:name ?name ;\n" + - " mm:caption ?caption .\n" + - "}"; - - String 
cimHasRunningSoftware = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * WHERE {\n" + - " mm:hasRunningProcess ?obj .\n" + - " ?obj mm:name ?name ; \n" + - " mm:handle ?handle ; \n" + - " mm:description ?description ; \n" + - " mm:caption ?caption ; \n" + - " mm:parameters ?params . \n" + - "}"; - - String cimCpu = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * \n" + - "WHERE {\n" + - " mm:hasProcessor ?obj .\n" + - " ?obj mm:maxClockSpeed ?speed .\n" + - " ?obj mm:loadPercentage ?load .\n" + - " ?obj mm:elementName ?type ." + - "}"; - - String cimCpuLoad = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * \n" + - "WHERE {\n" + - " mm:hasProcessor ?obj .\n" + - " ?obj mm:loadPercentage ?load ." + - "}"; - - - String cimHasFileSystem = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * WHERE {\n" + -// " mm:hasFileSystem ?obj ." + - " ?serv mm:hasFileSystem ?obj ." + - " ?obj mm:availableSpace ?available .\n" + - " ?obj mm:fileSystemSize ?size .\n" + - " ?obj mm:percentageSpaceUse ?use ." + - "}"; - - String clusterKolm = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?name ?cluster ?srv ?ncd ?thresh ?ts WHERE {\n" + - " ?cluster kolm:relatesTo ?pt ;\n" + - " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + - " ?srv mm:CSName ?name .\n" + - "} \n" + - " ORDER BY ?cluster ?srv ?ncd"; - - String clusterKolm2 = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?cserv ?srv ?ncd ?thresh ?ts WHERE {\n" + - " ?cpt kolm:ncd \"0.0\" .\n" + - " ?cpt kolm:serverRef ?cserv .\n" + - " ?cluster kolm:relatesTo ?cpt ;\n" + - " kolm:relatesTo ?pt ;\n" + - " kolm:timestamp ?cts ;\n" + - " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + -// " ?srv mm:CSName ?name .\n" + - " FILTER (?cts >= \"1290616617624\")" + - "} \n" + - " ORDER BY ?cserv ?ncd ?srv"; - - String clusterKolmOtherClusters = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?cserv ?srv ?ncd WHERE {\n" + - " ?cpt kolm:ncd \"0.0\" .\n" + - " ?cpt kolm:serverRef ?cserv .\n" + - " ?cluster kolm:relatesTo ?cpt .\n" + - " ?cluster kolm:distanceTo ?pt .\n" + - " ?cluster kolm:timestamp ?cts .\n" + -// " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + -// " ?srv mm:CSName ?name .\n" + - " FILTER (?cts >= \"1290616617624\")" + - "} \n" + - " ORDER BY ?cserv ?srv ?ncd"; - - String clusterKolmStratus13 = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT DISTINCT ?srv ?ncd WHERE {\n" + - " ?pt kolm:serverRef .\n" + - " ?cluster kolm:relatesTo ?pt .\n" + - " ?cluster kolm:relatesTo ?pt2 .\n" + - " ?pt2 kolm:serverRef ?srv .\n" + -// " ?cluster kolm:relatesTo ?pt ;\n" + -// " kolm:threshold ?thresh .\n" + -// " ?pt kolm:serverRef ;\n" + - " ?pt2 kolm:ncd ?ncd .\n" + - " ?cluster kolm:timestamp ?ts .\n" + -// " mm:CSName ?name .\n" + - "} \n" + - " ORDER BY ?ncd"; - - String cimLatestMeasure = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT ?proc ?val ?time WHERE {\n" + - " ?proc mm:loadPercentage ?val .\n" + - " ?subj rdf:subject ?proc .\n" + - " ?subj rdf:object ?val2 .\n" + - " ?subj rdf:type rdf:Statement ;\n" + - " \t mm:reportedAt ?time .\n" + - " FILTER (?val2 = ?val) }\n" + - "ORDER BY DESC(?time)\n" + - "LIMIT 250"; - - String deleteBlankNodesCim = "prefix Base: \n" + - "prefix Core: \n" + - "\n" + - "select *\n" + - "{ \n" + - "\n" + - "?subj a 
Core:UnitaryComputerSystem .\n" + - "?subj ?pred ?obj .\n" + - "FILTER isBlank(?server).\n" + - "}"; - -// String query = "DELETE {?subj } WHERE { ?subj }"; -// - String query = deleteBlankNodesCim; - System.out.println(query); - System.out.println(System.currentTimeMillis()); - - /** - * Create url object to POST to the running container - */ - - String queryenc = URLEncoder.encode(query, "UTF-8"); - - URL url = new URL("http://10.40.190.113:8080/rdfTripleStore/deletequery?query=" + queryenc); - URLConnection urlConnection = url.openConnection(); - urlConnection.setDoOutput(true); - - /** - * Get the corresponding response from server, if any - */ - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java deleted file mode 100644 index 1bfc2785e..000000000 --- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java +++ /dev/null @@ -1,66 +0,0 @@ -package mvm.cloud.rdf.web.cloudbase.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.net.URL; -import java.net.URLConnection; - -public class LoadDataServletRun { - - public static void main(String[] args) { - try { - final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader() - .getResourceAsStream("namedgraphs.trig"); - URL url = new URL("http://localhost:8080/web.rya/loadrdf" + - "?format=Trig" + - "&cv=ROSH|ANDR"); - URLConnection urlConnection = url.openConnection(); - urlConnection.setRequestProperty("Content-Type", "text/plain"); - urlConnection.setDoOutput(true); - - final OutputStream os = urlConnection.getOutputStream(); - - int read; - while((read = resourceAsStream.read()) >= 0) { - os.write(read); - } - resourceAsStream.close(); - os.flush(); - - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - os.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java deleted file mode 100644 index e9d918a20..000000000 --- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java +++ /dev/null @@ -1,467 +0,0 @@ -package mvm.cloud.rdf.web.cloudbase.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.net.URLEncoder; - -public class QueryDataServletRun { - - public static void main(String[] args) { - try { -// String query = "SELECT ?artist WHERE { ?abt \"1988\"." + -// " }"; - String artistQuery = "SELECT ?artist WHERE { " - + " ?abt ?artist . " - + " ?abt \"1993\" . " - + "}"; -// String query = "SELECT ?pred ?obj WHERE { ?pred ?obj }"; -// String query = "SELECT ?pred ?label ?obj WHERE { ?pred ?obj ." + -// " ?obj ?label }"; - long dayBefore = System.currentTimeMillis() - 86400000; - System.out.println(dayBefore); -// String query = "SELECT DISTINCT ?obj WHERE { ?serv . " + -// " ?serv ?obj ." + -// " ?serv ?ts ." + -//// " FILTER (?ts >= '"+dayBefore+"') " + -// " }" + -// " ORDER BY ?obj "; - - String giveAllClusters = "SELECT DISTINCT ?uu WHERE { ?uu ?obj . " + - " }" + - " ORDER BY ?uu "; - -// String query = "SELECT DISTINCT ?obj WHERE { ?obj . " + -// " }" + -// " ORDER BY ?obj "; - - //hasfunction query - String hasFunctionQuery = "SELECT DISTINCT ?obj WHERE { ?uu . 
" + - " ?uu ?obj" + - " }" + - " ORDER BY ?obj "; - - String allFunctions = "SELECT DISTINCT ?func ?obj WHERE { ?uu ?func . " + - " ?uu ?obj" + - " }" + - " ORDER BY ?func "; - - String allFunctionsThresh = "SELECT DISTINCT ?func ?obj ?thresh WHERE { ?uu ?func . " + - " ?uu ?obj ." + - " ?uu ?thresh" + - " }" + - " ORDER BY ?func "; - - - String cwdQuery = "SELECT DISTINCT ?obj ?packname WHERE { ?subj ?obj . " + - " ?subj ?instPacks ." + - " ?instPacks ?packid ." + - " ?packid ?packname } "; - - String cwdAllServersQuery = "SELECT DISTINCT ?obj WHERE { ?subj ?obj } "; - - // rearrange for better filter - // 0.124s - String lubm1 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x ub:takesCourse .\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " }"; - - // 142s - // not sure why it is so long will have to do some more tests - String lubm2 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?z ub:subOrganizationOf ?y .\n" + - " ?y rdf:type ub:University .\n" + - " ?z rdf:type ub:Department .\n" + - " ?x ub:memberOf ?z .\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " ?x ub:undergraduateDegreeFrom ?y .\n" + - " }"; - - String lubm2_a = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type ub:GraduateStudent .\n" + - " ?x ub:memberOf ?z .\n" + - " ?z ub:subOrganizationOf ?y .\n" + - " ?z rdf:type ub:Department .\n" + - " ?y rdf:type ub:University .\n" + -// " ?x ub:undergraduateDegreeFrom ?y .\n" + - " }"; - - // 0.127s - // Rearranged to put the assistant professor first, better filtering - String lubm3 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x ub:publicationAuthor .\n" + - " ?x rdf:type ub:Publication .\n" + - " }"; - -// had to infer relationships myself -// 0.671s - String lubm4 = "PREFIX rdf: \n" + - " PREFIX rdfs: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y ub:worksFor .\n" + - " ?x rdfs:subClassOf ub:Professor .\n" + - " ?y rdf:type ?x .\n" + - " ?y ub:name ?y1 .\n" + - " ?y ub:emailAddress ?y2 .\n" + - " ?y ub:telephone ?y3 .\n" + - " }"; - - //lubm5, we cannot do inferring for more than one level now. 
Person is too difficult - - //lubm6, we cannot do the implicit inference between Student and GraduateStudent - - //lubm14 - //0.1s - String lubm14 = "PREFIX rdf: \n" + - " PREFIX ub: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type ub:UndergraduateStudent .\n" + - " }"; - - String bongoAllCollections = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type bg:Collection .\n" + - " ?x bg:uniqueid ?uid .\n" + - " ?x bg:title ?title .\n" + - " ?x bg:hasAuthor ?author .\n" + - " ?x bg:marking ?marking .\n" + - " }"; - - String bongoEntriesForCategory = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:hasCategory ?category .\n" + - " FILTER (?category = \"cat1\") \n" + - " }"; - - String bongoEntriesForAuthor = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:hasAuthor ?author .\n" + - " FILTER (?author = \"andrew2\") \n" + - " }"; - - String bongoEntriesForModifiedTime = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?entryid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:modifiedTime ?modifiedTime .\n" + - " FILTER (xsd:dateTime(?modifiedTime) >= \"2011-10-21T13:18:30\"^^xsd:dateTime) \n" + - " }"; - String bongoEntriesSortTitle = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:title ?title .\n" + - " } ORDER BY ?title"; - - String bongoEntriesForTitle = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT DISTINCT ?uniqueid WHERE\n" + - " {\n" + - " ?entryid bg:inCollection bg:CollA .\n" + - " ?entryid rdf:type bg:Entry .\n" + - " ?entryid bg:uniqueid ?uniqueid .\n" + - " ?entryid bg:title ?title .\n" + - " FILTER (regex(?title,\"Entry1Title\")) }"; - - String bongoQuery = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?col rdf:type bg:Collection .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2\\/S\\/P\\/Stock\\/Google_simple\\/6 bg:uniqueid ?uniqueid} .\n" + -// " OPTIONAL{ bg:'latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6' bg:title ?title} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:name ?name} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:marking ?marking} .\n" + -// " OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:hasAuthor ?author} .\n" + - " }"; - - String bongoAllEntriesInCollection = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?y bg:inCollection bg:CollA .\n" + - " ?y rdf:type bg:Entry .\n" + - " ?y bg:uniqueid ?uid .\n" + - " ?y bg:title ?title .\n" + - " ?y bg:etag ?etag .\n" + - " }"; - - String bongoAllForEntry1 = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " bg:EntryM rdf:type bg:Entry .\n" + -// " bg:EntryN bg:inCollection bg:CollectionN .\n" + - " bg:EntryM bg:mimeType 
?mimeType .\n" + - " bg:EntryM bg:etag ?etag .\n" + - " OPTIONAL { bg:EntryM bg:slug ?slug}.\n" + - " bg:EntryM bg:uniqueid ?uniqueid .\n" + -// " bg:EntryN bg:title ?title .\n" + -// " bg:EntryN bg:marking ?marking .\n" + -// " bg:EntryN bg:mediaMarking ?mediaMarking .\n" + -// " bg:EntryN bg:editedTime ?editedTime .\n" + -// " bg:EntryN bg:modifiedTime ?modifiedTime .\n" + -// " bg:EntryN bg:publishedTime ?publishedTime .\n" + -// " bg:EntryN bg:mediaStorageId ?mediaStorageId .\n" + -// " bg:EntryN bg:mediaModifiedTime ?mediaModifiedTime .\n" + -// " bg:EntryN bg:entryStorageId ?entryStorageId .\n" + - " }"; - - String bongoEntryAllAuthors = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " SELECT * WHERE\n" + - " {\n" + - " bg:Entry1 bg:hasAuthor ?y .\n" + - " }"; - - String bongoEntriesModAfter = "PREFIX rdf: \n" + - " PREFIX bg: \n" + - " PREFIX xsd: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x bg:editedTime ?edTime .\n" + - " FILTER (xsd:dateTime(?edTime) >= \"2010-01-01T00:00:00\"^^xsd:dateTime)\n" + - " }"; - - String cimData = "PREFIX rdf: \n" + - " PREFIX mm: \n" + - " PREFIX xsd: \n" + - " SELECT * WHERE\n" + - " {\n" + - " ?x rdf:type mm:ComputerSystem .\n" + - " ?x mm:hasRunningOS ?y .\n" + - " ?y mm:name ?z .\n" + - " }"; - - String cimData2 = "PREFIX rdf: \n" + - " PREFIX mm: \n" + - " PREFIX mmcs: \n" + - " SELECT ?pred ?obj WHERE {\n" + - " mmcs:computersystem ?pred ?obj\n" + - " }"; - - String cimData3 = "PREFIX rdf: \n" + - "SELECT ?pred ?obj WHERE {\n" + - " ?pred ?obj\n" + - "}"; - - String cimHasInstalledSoftware = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT DISTINCT ?obj ?name ?caption WHERE {\n" + -// " mm:hasInstalledSoftware ?obj .\n" + - " ?serv mm:hasInstalledSoftware ?obj .\n" + - " ?obj mm:name ?name ;\n" + - " mm:caption ?caption .\n" + - "}"; - - String cimHasRunningSoftware = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * WHERE {\n" + - " mm:hasRunningProcess ?obj .\n" + - " ?obj mm:name ?name ; \n" + - " mm:handle ?handle ; \n" + - " mm:description ?description ; \n" + - " mm:caption ?caption ; \n" + - " mm:parameters ?params . \n" + - "}"; - - String cimCpu = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * \n" + - "WHERE {\n" + - " mm:hasProcessor ?obj .\n" + - " ?obj mm:maxClockSpeed ?speed .\n" + - " ?obj mm:loadPercentage ?load .\n" + - " ?obj mm:elementName ?type ." + - "}"; - - String cimCpuLoad = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * \n" + - "WHERE {\n" + - " mm:hasProcessor ?obj .\n" + - " ?obj mm:loadPercentage ?load ." + - "}"; - - - String cimHasFileSystem = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT * WHERE {\n" + -// " mm:hasFileSystem ?obj ." + - " ?serv mm:hasFileSystem ?obj ." + - " ?obj mm:availableSpace ?available .\n" + - " ?obj mm:fileSystemSize ?size .\n" + - " ?obj mm:percentageSpaceUse ?use ." 
+ - "}"; - - String clusterKolm = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?name ?cluster ?srv ?ncd ?thresh ?ts WHERE {\n" + - " ?cluster kolm:relatesTo ?pt ;\n" + - " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + - " ?srv mm:CSName ?name .\n" + - "} \n" + - " ORDER BY ?cluster ?srv ?ncd"; - - String clusterKolm2 = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?cserv ?srv ?ncd ?thresh ?ts WHERE {\n" + - " ?cpt kolm:ncd \"0.0\" .\n" + - " ?cpt kolm:serverRef ?cserv .\n" + - " ?cluster kolm:relatesTo ?cpt ;\n" + - " kolm:relatesTo ?pt ;\n" + - " kolm:timestamp ?cts ;\n" + - " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + -// " ?srv mm:CSName ?name .\n" + - " FILTER (?cts >= \"1290616617624\")" + - "} \n" + - " ORDER BY ?cserv ?ncd ?srv"; - - String clusterKolmOtherClusters = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT ?cserv ?srv ?ncd WHERE {\n" + - " ?cpt kolm:ncd \"0.0\" .\n" + - " ?cpt kolm:serverRef ?cserv .\n" + - " ?cluster kolm:relatesTo ?cpt .\n" + - " ?cluster kolm:distanceTo ?pt .\n" + - " ?cluster kolm:timestamp ?cts .\n" + -// " kolm:threshold ?thresh .\n" + - " ?pt kolm:serverRef ?srv ;\n" + - " kolm:ncd ?ncd ;\n" + - " kolm:timestamp ?ts .\n" + -// " ?srv mm:CSName ?name .\n" + - " FILTER (?cts >= \"1290616617624\")" + - "} \n" + - " ORDER BY ?cserv ?srv ?ncd"; - - String clusterKolmStratus13 = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "PREFIX kolm: \n" + - "SELECT DISTINCT ?srv ?ncd WHERE {\n" + - " ?pt kolm:serverRef .\n" + - " ?cluster kolm:relatesTo ?pt .\n" + - " ?cluster kolm:relatesTo ?pt2 .\n" + - " ?pt2 kolm:serverRef ?srv .\n" + -// " ?cluster kolm:relatesTo ?pt ;\n" + -// " kolm:threshold ?thresh .\n" + -// " ?pt kolm:serverRef ;\n" + - " ?pt2 kolm:ncd ?ncd .\n" + - " ?cluster kolm:timestamp ?ts .\n" + -// " mm:CSName ?name .\n" + - "} \n" + - " ORDER BY ?ncd"; - - String cimLatestMeasure = "PREFIX rdf: \n" + - "PREFIX mm: \n" + - "SELECT ?proc ?val ?time WHERE {\n" + - " ?proc mm:loadPercentage ?val .\n" + - " ?subj rdf:subject ?proc .\n" + - " ?subj rdf:object ?val2 .\n" + - " ?subj rdf:type rdf:Statement ;\n" + - " \t mm:reportedAt ?time .\n" + - " FILTER (?val2 = ?val) }\n" + - "ORDER BY DESC(?time)\n" + - "LIMIT 250"; - -// String query = "DELETE {?subj } WHERE { ?subj }"; -// - String query = "select * where {\n" + - " ?p ?o.\n" + - "}"; - System.out.println(query); - System.out.println(System.currentTimeMillis()); - - /** - * Create url object to POST to the running container - */ - - String queryenc = URLEncoder.encode(query, "UTF-8"); - - URL url = new URL("http://localhost:8080/rdfTripleStore/queryrdf?query=" + queryenc); - URLConnection urlConnection = url.openConnection(); - urlConnection.setDoOutput(true); - - /** - * Get the corresponding response from server, if any - */ - BufferedReader rd = new BufferedReader(new InputStreamReader( - urlConnection.getInputStream())); - String line; - while ((line = rd.readLine()) != null) { - System.out.println(line); - } - rd.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java deleted file mode 100644 index eea0badb2..000000000 --- 
a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java +++ /dev/null @@ -1,86 +0,0 @@ -package mvm.cloud.rdf.web.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.rio.RDFFormat; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.context.web.WebAppConfiguration; -import org.springframework.test.web.servlet.MockMvc; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import static org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup; - -@RunWith(SpringJUnit4ClassRunner.class) -@WebAppConfiguration -@ContextConfiguration({"/controllerIntegrationTest-accumulo.xml", "/controllerIntegrationTest-root.xml"}) -public class RdfControllerIntegrationTest { - - private MockMvc mockMvc; - - @Autowired - private RdfController controller; - - @Autowired - private Repository repository; - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Before - public void setup() { - this.mockMvc = standaloneSetup(controller).build(); - try { - RepositoryConnection con = repository.getConnection(); - con.add(getClass().getResourceAsStream("/test.nt"), "", RDFFormat.NTRIPLES); - con.close(); - } catch (Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - } - - @Test - public void emptyQuery() throws Exception { - mockMvc.perform(get("/queryrdf?query=")) - .andExpect(status().isOk()); - } - - @Test - public void emptyQueryXMLFormat() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "SELECT * WHERE { ?s a . 
}") - .param("query.resultformat", "xml")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.TEXT_XML)); - } -} diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java deleted file mode 100644 index 7888457cc..000000000 --- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java +++ /dev/null @@ -1,160 +0,0 @@ -package mvm.cloud.rdf.web.sail; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace; -import static org.junit.Assert.assertTrue; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import static org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.ValueFactory; -import org.openrdf.repository.Repository; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.rio.RDFFormat; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.context.web.WebAppConfiguration; -import org.springframework.test.web.servlet.MockMvc; -import org.springframework.test.web.servlet.ResultActions; -import org.springframework.web.context.WebApplicationContext; -import org.springframework.web.util.NestedServletException; - -@RunWith(SpringJUnit4ClassRunner.class) -@WebAppConfiguration -@ContextConfiguration("/controllerTest-context.xml") -public class RdfControllerTest { - - @Autowired - private WebApplicationContext wac; - - private MockMvc mockMvc; - - @Autowired - private RdfController controller; - - @Autowired - private Repository repository; - - @Rule public ExpectedException thrown = ExpectedException.none(); - - @Before - public void setup() { - this.mockMvc = standaloneSetup(controller).build(); - try { - RepositoryConnection con = repository.getConnection(); - con.add(getClass().getResourceAsStream("/test.nt"), "", RDFFormat.NTRIPLES); - con.close(); - } catch (Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - } - - @Test - public void emptyQuery() throws Exception { - 
this.mockMvc.perform(get("/queryrdf?query=")) - .andExpect(status().isOk()); - } - - @Test - public void emptyQueryXMLFormat() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "SELECT * WHERE { ?s ?p ?o . }") - .param("query.resultformat", "xml")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.TEXT_XML)); - } - - @Test - public void emptyQueryJSONFormat() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "SELECT * WHERE { ?s ?p ?o . }") - .param("query.resultformat", "json")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)); - } - - @Test - public void emptyQueryNoFormat() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "SELECT * WHERE { ?s ?p ?o . }")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.TEXT_XML)); - } - - @Test - public void callback() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "") - .param("callback", "test")) - .andExpect(status().isOk()) - .andExpect(content().string(equalToIgnoringWhiteSpace("test()"))); - } - - @Test - public void malformedQuery() throws Exception { - thrown.expect(NestedServletException.class); - this.mockMvc.perform(get("/queryrdf") - .param("query", "SELECT nothing WHERE { ?s ?p ?o }")); - } - - @Test - public void updateQuery() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "INSERT DATA { \"test\" }")) - .andExpect(status().isOk()); - - ValueFactory vf = repository.getValueFactory(); - RepositoryConnection con = repository.getConnection(); - - URI s = vf.createURI("http://mynamespace/ProductType1"); - URI p = vf.createURI("http://mynamespace#pred1"); - Literal o = vf.createLiteral("test"); - - assertTrue(con.getStatements(s, p, o, false).hasNext()); - } - - @Test - public void constructQuery() throws Exception { - this.mockMvc.perform(get("/queryrdf") - .param("query", "INSERT DATA { \"test\" }")) - .andExpect(status().isOk()); - - ResultActions actions = this.mockMvc.perform(get("/queryrdf") - .param("query", "CONSTRUCT {?subj \"test2\"} WHERE { ?subj \"test\" }")) - .andExpect(status().isOk()); -// System.out.println(actions.andReturn().getResponse().getContentAsString()); - - } - -} diff --git a/web/web.rya/src/test/resources/cdrdf.xml b/web/web.rya/src/test/resources/cdrdf.xml deleted file mode 100644 index 96829f03c..000000000 --- a/web/web.rya/src/test/resources/cdrdf.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - - - - Bob Dylan - USA - Columbia - 10.90 - 1985 - - - - Bonnie Tyler3 - UK - CBS Records - 9.90 - 1993 - - diff --git a/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml b/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml deleted file mode 100644 index 5b20d57a2..000000000 --- a/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml b/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml deleted file mode 100644 index 7fab80d39..000000000 --- a/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/web/web.rya/src/test/resources/controllerTest-context.xml b/web/web.rya/src/test/resources/controllerTest-context.xml deleted file 
index ec43c5bef..000000000
--- a/web/web.rya/src/test/resources/controllerTest-context.xml
+++ /dev/null
@@ -1,40 +0,0 @@
diff --git a/web/web.rya/src/test/resources/dummyData/memorystore.data b/web/web.rya/src/test/resources/dummyData/memorystore.data
deleted file mode 100644
index 44f60f2962dc4cb06aba2541a64ea46ebdefc9a2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 290
zcmV+-0p0#WO;bh!ABzY800000006a-PfNov7>B!V%0&DG9<1&tZTbhnc(YRwyg7F+
zS(@2ko76WcHK%=bQx76TYANiJTz>EK9G;2eI8(^C_PZ{$uI~_P+cs#EK*Ae}Fvcj1
zQ5^er<0~UH`SgWaOzhsJxqZ)nT>+!2kae&n?+np=;U4c{_Xg;(syVPMZJ=c8qha0t
zvIj^&bEW5Yb&|@B_h3pQMoF%EBvdv+gXt{O_3Hir{HiA6n?GO5vZ*$ljFNagk{UQ=
zo#H48100UxU-a+|BhcYz5x%|om;S_amKoVVR@gh84DWoFN+CG&UQNj$86_In+Fd^%
o0RNNvbeXQa7e*!IYLsVC?HL{t`!D>!*f`mJ0Vd~#Tr&g!08?d&wg3PC

diff --git a/web/web.rya/src/test/resources/namedgraphs.trig b/web/web.rya/src/test/resources/namedgraphs.trig
deleted file mode 100644
index 748d27697..000000000
--- a/web/web.rya/src/test/resources/namedgraphs.trig
+++ /dev/null
@@ -1,37 +0,0 @@
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-@prefix swp: <http://www.w3.org/2004/03/trix/swp-2/> .
-@prefix dc: <http://purl.org/dc/elements/1.1/> .
-@prefix ex: <http://www.example.org/vocabulary#> .
-@prefix : <http://www.example.org/exampleDocument#> .
-:G1 { :Monica ex:name "Monica Murphy" .
-      :Monica ex:homepage <http://www.monicamurphy.org> .
-      :Monica ex:email <mailto:monica@monicamurphy.org> .
-      :Monica ex:one .
-      :Monica ex:two .
-      :Monica ex:three .
-      :Monica ex:four .
-      :Monica ex:five .
-      :Monica ex:six .
-      :Monica ex:seven .
-      :Monica ex:eight .
-      :Monica ex:nine .
-      :Monica ex:ten .
-      :Monica ex:hasSkill ex:Management }
-
-:G2 { :Monica rdf:type ex:Person .
-      :Monica ex:hasSkill ex:Programming }
-
-:G4 { :Phobe ex:name "Phobe Buffet" }
-
-:G3 { :G1 swp:assertedBy _:w1 .
-      _:w1 swp:authority :Chris .
-      _:w1 dc:date "2003-10-02"^^xsd:date .
-      :G2 swp:quotedBy _:w2 .
-      :G4 swp:assertedBy _:w2 .
-      _:w2 dc:date "2003-09-03"^^xsd:date .
-      _:w2 swp:authority :Tom .
-      :Chris rdf:type ex:Person .
-      :Chris ex:email .
-      :Tom rdf:type ex:Person .
-      :Tom ex:email }
\ No newline at end of file
diff --git a/web/web.rya/src/test/resources/test.nt b/web/web.rya/src/test/resources/test.nt
deleted file mode 100644
index 4031f67b9..000000000
--- a/web/web.rya/src/test/resources/test.nt
+++ /dev/null
@@ -1,3 +0,0 @@
- .
- "Thing" .
- .
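For context on what the deleted controller tests above were exercising: they hit the web.rya `/queryrdf` endpoint with the `query`, `query.resultformat`, and `callback` request parameters. A manual invocation of the same endpoint might look like the sketch below. This is illustrative only — the host, port, and `web.rya` context path are assumptions about a local deployment, not values taken from this patch:

    # Hypothetical manual call against a locally deployed web.rya WAR;
    # adjust host/port/context path to your container. The SPARQL query
    # and the query.resultformat=json parameter mirror the deleted tests.
    curl "http://localhost:8080/web.rya/queryrdf?query=SELECT%20%2A%20WHERE%20%7B%20%3Fs%20%3Fp%20%3Fo%20.%20%7D&query.resultformat=json"

Per the tests, omitting `query.resultformat` should return XML, and supplying `callback=test` wraps the (empty) result as `test()` for JSONP-style consumption.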
From 2aea6297f122d9e487066b658807270b17cbcd2c Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Mon, 7 Dec 2015 07:08:28 -0500
Subject: [PATCH 3/6] Deleted root pom.xml to wake up mirror process

---
 pom.xml | 700 --------------------------------------------------------
 1 file changed, 700 deletions(-)
 delete mode 100644 pom.xml

diff --git a/pom.xml b/pom.xml
deleted file mode 100644
index 5f2164fb3..000000000
--- a/pom.xml
+++ /dev/null
@@ -1,700 +0,0 @@
-  4.0.0
-  org.apache
-  apache
-  17
-  org.apache.rya
-  rya-project
-  3.2.10-SNAPSHOT
-  pom
-  Apache Rya Project
-  2015
-  The Apache Software Foundation
-  http://www.apache.org/
-  Apache License, Version 2.0
-  http://www.apache.org/licenses/LICENSE-2.0
-  Dev
-  dev-subscribe@rya.incubator.apache.org
-  dev-unsubscribe@rya.incubator.apache.org
-  dev@rya.incubator.apache.org
-  http://mail-archives.apache.org/mod_mbox/rya-dev
-  ${maven.min-version}
-  common
-  dao
-  extras
-  osgi
-  pig
-  sail
-  web
-  2.7.6
-  1.6.4
-  2.5.2
-  0.9.2
-  1.1.0-rc.6
-  3.6.2
-  2.1
-  2.13.3
-  1.50.0
-  2.5.0
-  2.6
-  1.10
-  1.6
-  1.3
-  2.3.11
-  18.0
-  2.2.11
-  1.2.0
-  3.4
-  1.7.2
-  1.3
-  2.7.5
-  3.2.15.RELEASE
-  1.0.2.RELEASE
-  1.1.0.RELEASE
-  4.12
-  1.10.19
-  1.1.0
-  1.6.6
-  UTF-8
-  UTF-8
-  3.0.4
-  org.apache.rya rya.api ${project.version}
-  org.apache.rya rya.sail ${project.version}
-  org.apache.rya accumulo.rya ${project.version}
-  org.apache.rya mongodb.rya ${project.version}
-  org.apache.rya accumulo.utils ${project.version}
-  org.apache.rya rya.prospector ${project.version}
-  org.apache.rya rya.provenance ${project.version}
-  org.apache.rya rya.indexing ${project.version}
-  org.apache.rya rya.indexing accumulo-server ${project.version}
-  org.apache.rya rya.indexing map-reduce ${project.version}
-  org.apache.accumulo accumulo-core ${accumulo.version}
-  org.apache.rya sesame-runtime-osgi ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-runtime ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-model ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-query ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-queryalgebra-model ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-queryparser-sparql ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-queryresultio-sparqlxml ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-rio-rdfxml ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-runtime-osgi ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-http-client
-  org.openrdf.sesame sesame-http-server-spring
-  org.openrdf.sesame sesame-rio-ntriples ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-rio-nquads ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-rio-trig ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-queryalgebra-evaluation ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-queryresultio-sparqljson ${openrdf.sesame.version}
-  org.openrdf.sesame sesame-repository-api ${openrdf.sesame.version}
-  com.google.guava guava ${guava.version}
-  org.hamcrest hamcrest-all ${hamcrest.version}
-  org.apache.camel camel-core ${camel.version}
-  org.apache.camel camel-test ${camel.version}
-  org.slf4j slf4j-api ${slf4j.version}
-  org.slf4j slf4j-log4j12 ${slf4j.version}
-  org.apache.hadoop hadoop-common ${hadoop.version}
-  com.tinkerpop.blueprints blueprints-core ${blueprints.version}
-  com.tinkerpop.blueprints blueprints-sail-graph ${blueprints.version}
-  com.tinkerpop.gremlin gremlin-groovy ${blueprints.version}
-  com.tinkerpop.rexster rexster-server ${blueprints.version}
-  org.mongodb mongo-java-driver ${mongodb.version}
-  de.flapdoodle.embed de.flapdoodle.embed.mongo ${embed.mongo.version}
-  org.codehaus.groovy groovy-all ${groovy.version}
-  org.codehaus.gmaven.runtime gmaven-runtime-1.7 ${gmaven.version}
-  org.codehaus.groovy groovy-all
-  org.springframework.shell spring-shell ${spring.shell.version}
-  org.springframework spring-context ${spring.version}
-  org.springframework spring-core ${spring.version}
-  org.springframework spring-web ${spring.version}
-  org.springframework spring-webmvc ${spring.version}
-  org.springframework spring-beans ${spring.version}
-  org.springframework spring-test ${spring.version}
-  org.springframework.data spring-data-hadoop ${spring.hadoop.version}
-  org.apache.karaf.shell org.apache.karaf.shell.console ${karaf.version} provided
-  commons-lang commons-lang ${commons.lang.version}
-  commons-codec commons-codec ${commons.codec.version}
-  commons-pool commons-pool ${commons.pool.version}
-  org.apache.lucene lucene-core ${lucene.version}
-  org.apache.lucene lucene-analyzers ${lucene.version}
-  org.locationtech.geomesa geomesa-accumulo-datastore ${geomesa.version}
-  org.locationtech.geomesa geomesa-distributed-runtime ${geomesa.version}
-  joda-time joda-time ${joda-time.version}
-  org.apache.pig pig ${pig.version}
-  org.antlr antlr-runtime ${antlr-runtime.version} provided
-  net.sf.ehcache ehcache-core ${ehcache.version}
-  org.calrissian.mango mango-core ${mango.version}
-  org.mockito mockito-all ${mockito.version} test
-  junit junit ${junit.version} test
-  org.apache.mrunit mrunit ${mrunit.version} hadoop2 test
-  org.apache.maven.plugins maven-compiler-plugin ${project.build.sourceEncoding}
-  org.apache.maven.plugins maven-resources-plugin ${project.build.sourceEncoding}
-  org.apache.maven.plugins maven-surefire-plugin -Dfile.encoding=${project.build.sourceEncoding}
-  org.apache.maven.plugins maven-checkstyle-plugin
-  org.apache.maven.plugins maven-jar-plugin
-  org.apache.maven.plugins maven-javadoc-plugin true
-  org.apache.maven.plugins maven-dependency-plugin
-  org.apache.maven.plugins maven-enforcer-plugin [${maven.min-version},)
-  org.apache.maven.plugins maven-release-plugin true true install
-  org.codehaus.gmaven gmaven-plugin ${gmaven.version}
-  org.codehaus.groovy groovy-all ${groovy.version}
-  org.codehaus.gmaven.runtime gmaven-runtime-1.7 ${gmaven.version}
-  org.codehaus.groovy groovy-all
-  1.7
-  generateStubs
-  compile
-  generateTestStubs
-  testCompile
-  org.apache.maven.plugins maven-shade-plugin true package shade
-  org.apache.rat apache-rat-plugin
-  **/*.ntriples
-  **/*.trig
-  **/*.ttl
-  **/*.owl
-  **/*.nt
-  **/resources/META-INF/services/**
-  org.apache.maven.plugins maven-shade-plugin 2.3
-  maven-source-plugin
-  attach-sources package jar-no-fork
-  org.apache.maven.plugins maven-compiler-plugin 1.6 1.6
-  org.apache.maven.plugins maven-enforcer-plugin enforce-mvn enforce
-  org.apache.maven.plugins maven-surefire-plugin **/*IntegrationTest.java
-  LocationTech SNAPSHOT https://repo.locationtech.org/content/repositories/snapshots/
-  LocationTech RELEASE https://repo.locationtech.org/content/repositories/releases/
-  scm:git:git://git.apache.org/incubator-rya.git
-  scm:git:https://git-wip-us.apache.org/repos/asf/incubator-rya.git
-  v3.2.10-SNAPSHOT
-  https://git-wip-us.apache.org/repos/asf?p=incubator-rya.git

From ef9819803cce267d087f9e4b2a28bbcf1c2e3443 Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Mon, 7 Dec 2015 07:20:05 -0500
Subject: [PATCH 4/6] Edited Readme to wake up mirror process

---
 README.md | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 0376054c6..cb5c2f57c 100644
--- a/README.md
+++ b/README.md
@@ -19,5 +19,7 @@ under the License. -->
 This branch does not contain any source code.  It exists for two reasons:
 
-1. We pushed a large change and this broke the process that pushes changes to github.  We needed a small commit to wake up the process, so we created this branch and pushed a small change.
-2. Apache currently prevents branch deletes
\ No newline at end of file
+1. We pushed a large change and this broke the process that pushes changes to GitHub.  We needed a small commit to wake up the process, so we created this branch and pushed a small change.
+2. Apache currently prevents branch deletes
+
+If you push a large change and it doesn't immediately get mirrored to GitHub, feel free to edit this readme, push the commit, and hopefully the process wakes up. :)

From d52d58cef14df2e54ee3dda7629162c74ae9e243 Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Thu, 11 Aug 2016 16:25:22 -0400
Subject: [PATCH 5/6] Edited Readme to wake up mirror process

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index cb5c2f57c..c2bc6abcf 100644
--- a/README.md
+++ b/README.md
@@ -22,4 +22,5 @@ This branch does not contain any source code.  It exists for two reasons:
 1. We pushed a large change and this broke the process that pushes changes to GitHub.  We needed a small commit to wake up the process, so we created this branch and pushed a small change.
 2. Apache currently prevents branch deletes
 
+
 If you push a large change and it doesn't immediately get mirrored to GitHub, feel free to edit this readme, push the commit, and hopefully the process wakes up. :)

From e93f0d0a8deb383404c92d78980f11b7a504f145 Mon Sep 17 00:00:00 2001
From: Aaron Mihalik
Date: Thu, 11 Aug 2016 18:44:31 -0400
Subject: [PATCH 6/6] Edited Readme to wake up mirror process

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index c2bc6abcf..ed9a7355b 100644
--- a/README.md
+++ b/README.md
@@ -23,4 +23,5 @@ This branch does not contain any source code.  It exists for two reasons:
 2. Apache currently prevents branch deletes
 
+
 
 If you push a large change and it doesn't immediately get mirrored to GitHub, feel free to edit this readme, push the commit, and hopefully the process wakes up. :)
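As patches 5/6 and 6/6 show, any commit — even one that only adds a blank line — is enough to nudge the mirror. An even smaller variant is an empty commit, which touches no tracked file at all. This is a sketch, not a documented ASF procedure; it assumes you have push rights on the canonical repo, that the mirror fires on any new commit, and that the remote is named "origin" (the branch name below is purely illustrative):

    # Hypothetical wake-up sequence using an empty commit.
    # --allow-empty is standard Git: it records a commit with no file changes.
    git checkout temp_branch_swap_readme      # illustrative branch name
    git commit --allow-empty -m "Empty commit to wake up GitHub mirror"
    git push origin temp_branch_swap_readme

If the mirror only reacts to content changes rather than to new commits, the README-edit approach used in this patch series remains the fallback.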