From 8bbb1dcacea996f81b989e360578a64ba3b78bfb Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Wed, 2 Mar 2016 09:22:20 -0500 Subject: [PATCH 0001/1113] SOLR-8764: Remove deprecated methods and classes --- solr/CHANGES.txt | 2 + .../clustering/carrot2/CarrotParams.java | 10 +- .../carrot2/SolrResourceLocator.java | 4 +- .../solr/collection1/conf/solrconfig.xml | 5 - .../carrot2/CarrotClusteringEngineTest.java | 6 - .../dataimport/EntityProcessorWrapper.java | 12 -- .../TestContentStreamDataSource.java | 6 +- .../TestSolrEntityProcessorEndToEnd.java | 6 +- .../UIMAUpdateRequestProcessorTest.java | 6 +- .../solrj/embedded/JettySolrRunner.java | 120 +------------- .../java/org/apache/solr/core/NodeConfig.java | 20 --- .../java/org/apache/solr/core/SolrConfig.java | 8 +- .../java/org/apache/solr/core/SolrCore.java | 48 ------ .../solr/handler/MoreLikeThisHandler.java | 17 -- .../solr/handler/admin/AdminHandlers.java | 151 ------------------ .../handler/admin/LukeRequestHandler.java | 7 - .../apache/solr/internal/csv/CSVParser.java | 34 +--- .../apache/solr/internal/csv/CSVStrategy.java | 15 +- .../org/apache/solr/parser/CharStream.java | 16 -- .../apache/solr/parser/FastCharStream.java | 8 - .../solr/schema/AbstractSpatialFieldType.java | 23 +-- .../schema/RptWithGeometrySpatialField.java | 1 - .../org/apache/solr/schema/TrieField.java | 66 +------- .../apache/solr/search/SolrIndexSearcher.java | 5 - .../distance/GeoDistValueSourceParser.java | 2 - .../solr/update/UpdateShardHandler.java | 5 - .../UpdateRequestProcessorChain.java | 6 - .../org/apache/solr/util/DistanceUnits.java | 4 - .../solr/collection1/conf/schema_latest.xml | 2 +- .../solr/collection1/conf/solrconfig-sql.xml | 1 - .../apache/solr/TestSolrCoreProperties.java | 5 +- .../solr/cloud/BaseCdcrDistributedZkTest.java | 9 +- .../org/apache/solr/core/TestSolrXml.java | 4 - .../apache/solr/core/TestXIncludeConfig.java | 4 +- .../solr/handler/TestReplicationHandler.java | 7 +- 
.../handler/TestReplicationHandlerBackup.java | 8 +- .../apache/solr/handler/TestRestoreCore.java | 8 +- .../solr/schema/SpatialRPTFieldTypeTest.java | 56 +------ ...cExpirationUpdateProcessorFactoryTest.java | 11 +- .../SignatureUpdateProcessorFactoryTest.java | 12 +- ...elessScriptUpdateProcessorFactoryTest.java | 4 +- .../UniqFieldsUpdateProcessorFactoryTest.java | 3 +- .../UpdateRequestProcessorFactoryTest.java | 8 +- .../client/solrj/impl/HttpSolrClient.java | 5 - .../solrj/request/CollectionAdminRequest.java | 17 -- .../client/solrj/response/PivotField.java | 8 - .../solr/client/solrj/util/ClientUtils.java | 36 ----- .../solr/common/cloud/ClosableThread.java | 26 --- .../solr/common/cloud/ZkCmdExecutor.java | 5 - .../solr/common/cloud/ZkStateReader.java | 6 +- .../solr/common/util/IteratorChain.java | 86 ---------- .../solr/collection1/conf/solrconfig-sql.xml | 1 - .../solrj/beans/TestDocumentObjectBinder.java | 101 ++++-------- .../solr/common/util/IteratorChainTest.java | 104 ------------ .../java/org/apache/solr/SolrTestCaseHS.java | 13 +- solr/webapp/web/js/angular/app.js | 11 -- solr/webapp/web/js/scripts/app.js | 11 -- 57 files changed, 124 insertions(+), 1061 deletions(-) delete mode 100644 solr/core/src/java/org/apache/solr/handler/admin/AdminHandlers.java delete mode 100644 solr/solrj/src/java/org/apache/solr/common/cloud/ClosableThread.java delete mode 100644 solr/solrj/src/java/org/apache/solr/common/util/IteratorChain.java delete mode 100644 solr/solrj/src/test/org/apache/solr/common/util/IteratorChainTest.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 77511cd701b6..3120eeb45e72 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -349,6 +349,8 @@ Other Changes * SOLR-8758: Add a new SolrCloudTestCase class, using MiniSolrCloudCluster (Alan Woodward) + +* SOLR-8764: Remove all deprecated methods and classes from master prior to the 6.0 release. 
(Steve Rowe) ================== 5.5.1 ================== diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java index 42ff8da84024..71a22fe93430 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotParams.java @@ -46,14 +46,7 @@ public final class CarrotParams { public static String LANGUAGE_CODE_MAP = CARROT_PREFIX + "lcmap"; /** - * Use {@link #RESOURCES_DIR}. - */ - @Deprecated - public static String LEXICAL_RESOURCES_DIR = CARROT_PREFIX + "lexicalResourcesDir"; - - /** - * A replacement property pointing to Carrot2 resources - * (a more generic version of the deprecated {@link #LEXICAL_RESOURCES_DIR}). + * Points to Carrot2 resources */ public static String RESOURCES_DIR = CARROT_PREFIX + "resourcesDir"; @@ -72,7 +65,6 @@ public final class CarrotParams { NUM_DESCRIPTIONS, OUTPUT_SUB_CLUSTERS, - LEXICAL_RESOURCES_DIR, RESOURCES_DIR, LANGUAGE_CODE_MAP); diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java index b0783022d7dc..2cc67993a2ec 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java @@ -45,10 +45,8 @@ class SolrResourceLocator implements IResourceLocator { public SolrResourceLocator(SolrCore core, SolrParams initParams) { resourceLoader = core.getResourceLoader(); - @SuppressWarnings("deprecation") - String lexicalResourcesDir = initParams.get(CarrotParams.LEXICAL_RESOURCES_DIR); String resourcesDir = initParams.get(CarrotParams.RESOURCES_DIR); 
- carrot2ResourcesDir = firstNonNull(resourcesDir, lexicalResourcesDir, CarrotClusteringEngine.CARROT_RESOURCES_PREFIX); + carrot2ResourcesDir = firstNonNull(resourcesDir, CarrotClusteringEngine.CARROT_RESOURCES_PREFIX); } @SuppressWarnings("unchecked") diff --git a/solr/contrib/clustering/src/test-files/clustering/solr/collection1/conf/solrconfig.xml b/solr/contrib/clustering/src/test-files/clustering/solr/collection1/conf/solrconfig.xml index 7b2bd46c81ef..be629d80c0d7 100644 --- a/solr/contrib/clustering/src/test-files/clustering/solr/collection1/conf/solrconfig.xml +++ b/solr/contrib/clustering/src/test-files/clustering/solr/collection1/conf/solrconfig.xml @@ -347,11 +347,6 @@ org.apache.solr.handler.clustering.carrot2.LexicalResourcesCheckClusteringAlgorithm clustering/custom - - lexical-resource-check-custom-resource-dir-deprecated - org.apache.solr.handler.clustering.carrot2.LexicalResourcesCheckClusteringAlgorithm - clustering/custom - custom-duplicating-tokenizer org.apache.solr.handler.clustering.carrot2.EchoTokensClusteringAlgorithm diff --git a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java index 302247f39e7b..752570de3c12 100644 --- a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java +++ b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java @@ -199,12 +199,6 @@ public void testLexicalResourcesFromSolrConfigCustomDir() throws Exception { "online,customsolrstopwordcustomdir,customsolrstoplabelcustomdir"); } - @Test - public void testLexicalResourcesFromSolrConfigCustomDirDeprecated() throws Exception { - checkLexicalResourcesFromSolrConfig("lexical-resource-check-custom-resource-dir-deprecated", - "online,customsolrstopwordcustomdir,customsolrstoplabelcustomdir"); - } - 
private void checkLexicalResourcesFromSolrConfig(String engineName, String wordsToCheck) throws IOException { ModifiableSolrParams params = new ModifiableSolrParams(); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java index c88c5775b8cf..6370d24cf278 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java @@ -318,22 +318,10 @@ public void setDatasource(DataSource datasource) { this.datasource = datasource; } - /** @deprecated will be removed in Solr 6; use {@link #isInitialized()} */ - @Deprecated - public boolean isInitalized() { - return initialized; - } - public boolean isInitialized() { return initialized; } - /** @deprecated will be removed in Solr 6; use {@link #setInitialized(boolean)} */ - @Deprecated - public void setInitalized(boolean initialized) { - this.initialized = initialized; - } - public void setInitialized(boolean initialized) { this.initialized = initialized; } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java index b0b30c402148..453f5713709d 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java @@ -32,6 +32,7 @@ import java.io.File; import java.nio.file.Files; import java.util.List; +import java.util.Properties; /** * Test for ContentStreamDataSource @@ -173,8 +174,9 @@ public void setUp() throws Exception { } private JettySolrRunner 
createJetty(SolrInstance instance) throws Exception { - System.setProperty("solr.data.dir", instance.getDataDir()); - JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), buildJettyConfig("/solr")); + Properties nodeProperties = new Properties(); + nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); + JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), nodeProperties, buildJettyConfig("/solr")); jetty.start(); return jetty; } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java index 1253f1643e95..abefc493f6b1 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Properties; /** * End-to-end test of SolrEntityProcessor. 
"Real" test using embedded Solr @@ -340,8 +341,9 @@ public void tearDown() throws Exception { } private JettySolrRunner createJetty(SolrInstance instance) throws Exception { - JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), buildJettyConfig("/solr")); - jetty.setDataDir(instance.getDataDir()); + Properties nodeProperties = new Properties(); + nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); + JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), nodeProperties, buildJettyConfig("/solr")); jetty.start(); return jetty; } diff --git a/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java b/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java index 95feb365fa0a..5879c78e212f 100644 --- a/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java +++ b/solr/contrib/uima/src/test/org/apache/solr/uima/processor/UIMAUpdateRequestProcessorTest.java @@ -65,8 +65,7 @@ public void testProcessorConfiguration() { SolrCore core = h.getCore(); UpdateRequestProcessorChain chained = core.getUpdateProcessingChain("uima"); assertNotNull(chained); - UIMAUpdateRequestProcessorFactory factory = (UIMAUpdateRequestProcessorFactory) chained - .getFactories()[0]; + UIMAUpdateRequestProcessorFactory factory = (UIMAUpdateRequestProcessorFactory)chained.getProcessors().get(0); assertNotNull(factory); UpdateRequestProcessor processor = factory.getInstance(req(), null, null); assertTrue(processor instanceof UIMAUpdateRequestProcessor); @@ -77,8 +76,7 @@ public void testMultiMap() { SolrCore core = h.getCore(); UpdateRequestProcessorChain chained = core.getUpdateProcessingChain("uima-multi-map"); assertNotNull(chained); - UIMAUpdateRequestProcessorFactory factory = (UIMAUpdateRequestProcessorFactory) chained - .getFactories()[0]; + UIMAUpdateRequestProcessorFactory factory = 
(UIMAUpdateRequestProcessorFactory)chained.getProcessors().get(0); assertNotNull(factory); UpdateRequestProcessor processor = factory.getInstance(req(), null, null); assertTrue(processor instanceof UIMAUpdateRequestProcessor); diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java index 02954c4d4c35..88ea5677aef6 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java @@ -114,13 +114,6 @@ public void destroy() { } } - private static Properties defaultNodeProperties(String solrconfigFilename, String schemaFilename) { - Properties props = new Properties(); - props.setProperty("solrconfig", solrconfigFilename); - props.setProperty("schema", schemaFilename); - return props; - } - /** * Create a new JettySolrRunner. * @@ -134,82 +127,6 @@ public JettySolrRunner(String solrHome, String context, int port) { this(solrHome, JettyConfig.builder().setContext(context).setPort(port).build()); } - /** - * @deprecated use {@link #JettySolrRunner(String,Properties,JettyConfig)} - */ - @Deprecated - public JettySolrRunner(String solrHome, String context, int port, String solrConfigFilename, String schemaFileName) { - this(solrHome, defaultNodeProperties(solrConfigFilename, schemaFileName), JettyConfig.builder() - .setContext(context) - .setPort(port) - .build()); - } - - /** - * @deprecated use {@link #JettySolrRunner(String,Properties,JettyConfig)} - */ - @Deprecated - public JettySolrRunner(String solrHome, String context, int port, - String solrConfigFilename, String schemaFileName, boolean stopAtShutdown) { - this(solrHome, defaultNodeProperties(solrConfigFilename, schemaFileName), - JettyConfig.builder() - .setContext(context) - .setPort(port) - .stopAtShutdown(stopAtShutdown) - .build()); - } - - /** - * Constructor taking an ordered list of 
additional (servlet holder -> path spec) mappings - * to add to the servlet context - * @deprecated use {@link JettySolrRunner#JettySolrRunner(String,Properties,JettyConfig)} - */ - @Deprecated - public JettySolrRunner(String solrHome, String context, int port, - String solrConfigFilename, String schemaFileName, boolean stopAtShutdown, - SortedMap extraServlets) { - this(solrHome, defaultNodeProperties(solrConfigFilename, schemaFileName), - JettyConfig.builder() - .setContext(context) - .setPort(port) - .stopAtShutdown(stopAtShutdown) - .withServlets(extraServlets) - .build()); - } - - /** - * @deprecated use {@link #JettySolrRunner(String,Properties,JettyConfig)} - */ - @Deprecated - public JettySolrRunner(String solrHome, String context, int port, String solrConfigFilename, String schemaFileName, - boolean stopAtShutdown, SortedMap extraServlets, SSLConfig sslConfig) { - this(solrHome, defaultNodeProperties(solrConfigFilename, schemaFileName), - JettyConfig.builder() - .setContext(context) - .setPort(port) - .stopAtShutdown(stopAtShutdown) - .withServlets(extraServlets) - .withSSLConfig(sslConfig) - .build()); - } - - /** - * @deprecated use {@link #JettySolrRunner(String,Properties,JettyConfig)} - */ - @Deprecated - public JettySolrRunner(String solrHome, String context, int port, String solrConfigFilename, String schemaFileName, - boolean stopAtShutdown, SortedMap extraServlets, SSLConfig sslConfig, - SortedMap, String> extraRequestFilters) { - this(solrHome, defaultNodeProperties(solrConfigFilename, schemaFileName), - JettyConfig.builder() - .setContext(context) - .setPort(port) - .stopAtShutdown(stopAtShutdown) - .withServlets(extraServlets) - .withFilters(extraRequestFilters) - .withSSLConfig(sslConfig) - .build()); - } /** * Construct a JettySolrRunner @@ -552,42 +469,17 @@ public static void main(String[] args) { } /** - * @deprecated set properties in the Properties passed to the constructor - */ - @Deprecated - public void setShards(String shardList) { - 
nodeProperties.setProperty("shard", shardList); - } - - /** - * @deprecated set properties in the Properties passed to the constructor - */ - @Deprecated - public void setDataDir(String dataDir) { - nodeProperties.setProperty("solr.data.dir", dataDir); - } - - /** - * @deprecated set properties in the Properties passed to the constructor - */ - @Deprecated - public void setUlogDir(String ulogDir) { - nodeProperties.setProperty("solr.ulog.dir", ulogDir); - } - - /** - * @deprecated set properties in the Properties passed to the constructor + * @return the Solr home directory of this JettySolrRunner */ - @Deprecated - public void setCoreNodeName(String coreNodeName) { - nodeProperties.setProperty("coreNodeName", coreNodeName); + public String getSolrHome() { + return solrHome; } /** - * @return the Solr home directory of this JettySolrRunner + * @return this node's properties */ - public String getSolrHome() { - return solrHome; + public Properties getNodeProperties() { + return nodeProperties; } private void waitForLoadingCoresToFinish(long timeoutMs) { diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java index 546f27c5f0ad..0783355cfe42 100644 --- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java +++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java @@ -106,26 +106,6 @@ public UpdateShardHandlerConfig getUpdateShardHandlerConfig() { return updateShardHandlerConfig; } - @Deprecated - public int getDistributedConnectionTimeout() { - return updateShardHandlerConfig.getDistributedConnectionTimeout(); - } - - @Deprecated - public int getDistributedSocketTimeout() { - return updateShardHandlerConfig.getDistributedSocketTimeout(); - } - - @Deprecated - public int getMaxUpdateConnections() { - return updateShardHandlerConfig.getMaxUpdateConnections(); - } - - @Deprecated - public int getMaxUpdateConnectionsPerHost() { - return updateShardHandlerConfig.getMaxUpdateConnectionsPerHost(); 
- } - public int getCoreLoadThreadCount() { return coreLoadThreads; } diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java index e66fc7e74045..0b4bac33678e 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java @@ -197,7 +197,6 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is) if (hasDeprecatedIndexConfig) { throw new SolrException(ErrorCode.FORBIDDEN, " and configuration sections are discontinued. Use instead."); } else { - defaultIndexConfig = mainIndexConfig = null; indexConfigPrefix = "indexConfig"; } assertWarnOrFail("The config has been discontinued and NRT mode is always used by Solr." + @@ -214,7 +213,7 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is) ); // Parse indexConfig section, using mainIndex as backup in case old config is used - indexConfig = new SolrIndexConfig(this, "indexConfig", mainIndexConfig); + indexConfig = new SolrIndexConfig(this, "indexConfig", null); booleanQueryMaxClauseCount = getInt("query/maxBooleanClauses", BooleanQuery.getMaxClauseCount()); log.info("Using Lucene MatchVersion: " + luceneMatchVersion); @@ -466,11 +465,6 @@ public SolrRequestParsers getRequestParsers() { // DocSet public final float hashSetInverseLoadFactor; public final int hashDocSetMaxSize; - // default & main index configurations, deprecated as of 3.6 - @Deprecated - public final SolrIndexConfig defaultIndexConfig; - @Deprecated - public final SolrIndexConfig mainIndexConfig; // IndexConfig settings public final SolrIndexConfig indexConfig; diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index 5d7ebdce8795..06d7da305d8f 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -166,9 +166,6 @@ public final class 
SolrCore implements SolrInfoMBean, Closeable { public Date getStartTimeStamp() { return startTime; } - @Deprecated - public long getStartTime() { return startTime.getTime(); } - public long getStartNanoTime() { return startNanoTime; } @@ -641,56 +638,11 @@ private UpdateHandler createUpdateHandler(String className, UpdateHandler update return createReloadedUpdateHandler(className, "Update Handler", updateHandler); } - /** - * Creates a new core and register it in the list of cores. - * If a core with the same name already exists, it will be stopped and replaced by this one. - * - * @param dataDir the index directory - * @param config a solr config instance - * @param schema a solr schema instance - * - * @since solr 1.3 - * @deprecated will be removed in the next release - */ - public SolrCore(String name, String dataDir, SolrConfig config, IndexSchema schema, CoreDescriptor cd) { - this(name, dataDir, config, schema, null, cd, null, null, null); - } - public SolrCore(CoreDescriptor cd, ConfigSet coreConfig) { this(cd.getName(), null, coreConfig.getSolrConfig(), coreConfig.getIndexSchema(), coreConfig.getProperties(), cd, null, null, null); } - /** - * Creates a new core that is to be loaded lazily. i.e. lazyLoad="true" in solr.xml - * - * @since solr 4.1 - * @deprecated will be removed in the next release - */ - public SolrCore(String name, CoreDescriptor coreDescriptor) { - this.coreDescriptor = coreDescriptor; - this.setName(name); - this.schema = null; - this.dataDir = null; - this.ulogDir = null; - this.solrConfig = null; - this.configSetProperties = null; - this.maxWarmingSearchers = 2; // we don't have a config yet, just pick a number. 
- this.slowQueryThresholdMillis = -1; - this.resourceLoader = null; - this.updateHandler = null; - this.isReloaded = true; - this.reqHandlers = null; - this.updateProcessorChains = null; - this.infoRegistry = null; - this.codec = null; - this.ruleExpiryLock = null; - this.memClassLoader = null; - this.directoryFactory = null; - this.solrCoreState = null; - this.restManager = null; - this.solrDelPolicy = null; - } /** * Creates a new core and register it in the list of cores. If a core with the diff --git a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java index ad28911e4089..89b0180127b1 100644 --- a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java @@ -434,23 +434,6 @@ public DocListAndSet getMoreLikeThis( Reader reader, int start, int rows, List getMoreLikeThese( DocList docs, int rows, int flags ) throws IOException - { - IndexSchema schema = searcher.getSchema(); - NamedList mlt = new SimpleOrderedMap<>(); - DocIterator iterator = docs.iterator(); - while( iterator.hasNext() ) { - int id = iterator.nextDoc(); - - DocListAndSet sim = getMoreLikeThis( id, 0, rows, null, null, flags ); - String name = schema.printableUniqueKey( reader.document( id ) ); - - mlt.add(name, sim.docList); - } - return mlt; - } - public NamedList getMoreLikeTheseQuery(DocList docs) throws IOException { IndexSchema schema = searcher.getSchema(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlers.java b/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlers.java deleted file mode 100644 index 1965a5051245..000000000000 --- a/solr/core/src/java/org/apache/solr/handler/admin/AdminHandlers.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.handler.admin; - -import java.lang.invoke.MethodHandles; -import java.net.URL; -import java.util.Map; - -import org.apache.solr.common.SolrException; -import org.apache.solr.common.util.NamedList; -import org.apache.solr.core.SolrCore; -import org.apache.solr.handler.RequestHandlerBase; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.request.SolrRequestHandler; -import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.util.plugin.SolrCoreAware; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A special Handler that registers all standard admin handlers - * - * @since solr 1.3 - * @deprecated No need to register this request handler. 
All - * the plugins registered by this class are implicitly registered by the system - */ -@Deprecated -public class AdminHandlers extends RequestHandlerBase implements SolrCoreAware -{ - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - NamedList initArgs = null; - - private static class StandardHandler { - final String name; - final SolrRequestHandler handler; - - public StandardHandler( String n, SolrRequestHandler h ) - { - this.name = n; - this.handler = h; - } - } - - /** - * Save the args and pass them to each standard handler - */ - @Override - public void init(NamedList args) { - this.initArgs = args; - } - - @Override - public void inform(SolrCore core) - { - String path = null; - path = getPluginInfo().name; - if( path == null ) { - throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, - "The AdminHandler is not registered with the current core." ); - } - if( !path.startsWith( "/" ) ) { - throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, - "The AdminHandler needs to be registered to a path. 
Typically this is '/admin'" ); - } - // Remove the parent handler - core.registerRequestHandler(path, null); - if( !path.endsWith( "/" ) ) { - path += "/"; - } - - StandardHandler[] list = new StandardHandler[] { - new StandardHandler( "luke", new LukeRequestHandler() ), - new StandardHandler( "system", new SystemInfoHandler() ), - new StandardHandler( "mbeans", new SolrInfoMBeanHandler() ), - new StandardHandler( "plugins", new PluginInfoHandler() ), - new StandardHandler( "threads", new ThreadDumpHandler() ), - new StandardHandler( "properties", new PropertiesRequestHandler() ), - new StandardHandler( "logging", new LoggingHandler() ), - new StandardHandler( "file", new ShowFileRequestHandler() ) - }; - - for( StandardHandler handler : list ) { - if( core.getRequestHandler( path+handler.name ) == null ) { - handler.handler.init( initArgs ); - core.registerRequestHandler( path+handler.name, handler.handler ); - if( handler.handler instanceof SolrCoreAware ) { - ((SolrCoreAware)handler.handler).inform(core); - } - } - } - log.warn(" is deprecated . 
It is not required anymore"); - } - - - @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) { - throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, - "The AdminHandler should never be called directly" ); - } - - //////////////////////// SolrInfoMBeans methods ////////////////////// - - @Override - public String getDescription() { - return "Register Standard Admin Handlers"; - } - - @Override - public String getVersion() { - return getClass().getPackage().getSpecificationVersion(); - } - - @Override - public String getSource() { - return null; - } - - @Override - public Category getCategory() { - return Category.QUERYHANDLER; - } - - @Override - public URL[] getDocs() { - return null; - } - - @Override - public String getName() { - return this.getClass().getName(); - } - - @Override - public NamedList getStatistics() { - return null; - } -} diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java index 4e497780e6e8..0ec6d79fecbb 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java @@ -562,13 +562,6 @@ private static void populateFieldInfo(IndexSchema schema, typeusemap.put( ft.getTypeName(), v ); } - /** - * @deprecated use {@link #getIndexInfo(DirectoryReader)} since you now have to explicitly pass the "fl" prameter - * and this was always called with "false" anyway from CoreAdminHandler - */ - public static SimpleOrderedMap getIndexInfo(DirectoryReader reader, boolean detail) throws IOException { - return getIndexInfo(reader); - } // This method just gets the top-most level of information. This was conflated with getting detailed info // for *all* the fields, called from CoreAdminHandler etc. 
diff --git a/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java b/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java index 8d88c3727107..ad1aef71f6c5 100644 --- a/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java +++ b/solr/core/src/java/org/apache/solr/internal/csv/CSVParser.java @@ -111,41 +111,9 @@ Token reset() { * @param input a Reader containing "csv-formatted" input */ public CSVParser(Reader input) { - // note: must match default-CSV-strategy !! - this(input, ','); + this(input, CSVStrategy.DEFAULT_STRATEGY); } - /** - * Customized value delimiter parser. - * - * The parser follows the default {@link CSVStrategy} - * except for the delimiter setting. - * - * @param input a Reader based on "csv-formatted" input - * @param delimiter a Char used for value separation - * @deprecated use {@link #CSVParser(Reader,CSVStrategy)}. - */ - public CSVParser(Reader input, char delimiter) { - this(input, delimiter, '"', CSVStrategy.COMMENTS_DISABLED); - } - - /** - * Customized csv parser. - * - * The parser parses according to the given CSV dialect settings. - * Leading whitespaces are truncated, unicode escapes are - * not interpreted and empty lines are ignored. - * - * @param input a Reader based on "csv-formatted" input - * @param delimiter a Char used for value separation - * @param encapsulator a Char used as value encapsulation marker - * @param commentStart a Char used for comment identification - * @deprecated use {@link #CSVParser(Reader,CSVStrategy)}. 
- */ - public CSVParser(Reader input, char delimiter, char encapsulator, char commentStart) { - this(input, new CSVStrategy(delimiter, encapsulator, commentStart)); - } - /** * Customized CSV parser using the given {@link CSVStrategy} * diff --git a/solr/core/src/java/org/apache/solr/internal/csv/CSVStrategy.java b/solr/core/src/java/org/apache/solr/internal/csv/CSVStrategy.java index 9273908b0e8e..e27c9c372610 100644 --- a/solr/core/src/java/org/apache/solr/internal/csv/CSVStrategy.java +++ b/solr/core/src/java/org/apache/solr/internal/csv/CSVStrategy.java @@ -54,7 +54,7 @@ public class CSVStrategy implements Cloneable, Serializable { public CSVStrategy(char delimiter, char encapsulator, char commentStart) { - this(delimiter, encapsulator, commentStart, true, false, true); + this(delimiter, encapsulator, commentStart, ESCAPE_DISABLED, true, true, false, true); } /** @@ -89,19 +89,6 @@ public CSVStrategy( setIgnoreEmptyLines(ignoreEmptyLines); } - /** @deprecated Use {@link #CSVStrategy(char, char, char, char, boolean, boolean, boolean, boolean)} */ - public CSVStrategy( - char delimiter, - char encapsulator, - char commentStart, - boolean ignoreLeadingWhitespace, - boolean interpretUnicodeEscapes, - boolean ignoreEmptyLines) - { - this(delimiter, encapsulator, commentStart, CSVStrategy.ESCAPE_DISABLED, ignoreLeadingWhitespace, - true, interpretUnicodeEscapes, ignoreEmptyLines); - } - public void setDelimiter(char delimiter) { this.delimiter = delimiter; } public char getDelimiter() { return this.delimiter; } diff --git a/solr/core/src/java/org/apache/solr/parser/CharStream.java b/solr/core/src/java/org/apache/solr/parser/CharStream.java index 0400af47e050..9bec5a6e9d52 100644 --- a/solr/core/src/java/org/apache/solr/parser/CharStream.java +++ b/solr/core/src/java/org/apache/solr/parser/CharStream.java @@ -27,22 +27,6 @@ interface CharStream { */ char readChar() throws java.io.IOException; - @Deprecated - /** - * Returns the column position of the character last 
read. - * @deprecated - * @see #getEndColumn - */ - int getColumn(); - - @Deprecated - /** - * Returns the line number of the character last read. - * @deprecated - * @see #getEndLine - */ - int getLine(); - /** * Returns the column number of the last character for current token (being * matched after the last call to BeginTOken). diff --git a/solr/core/src/java/org/apache/solr/parser/FastCharStream.java b/solr/core/src/java/org/apache/solr/parser/FastCharStream.java index 6a283f20d09e..712a28335b55 100644 --- a/solr/core/src/java/org/apache/solr/parser/FastCharStream.java +++ b/solr/core/src/java/org/apache/solr/parser/FastCharStream.java @@ -105,14 +105,6 @@ public final void Done() { } } - @Override - public final int getColumn() { - return bufferStart + bufferPosition; - } - @Override - public final int getLine() { - return 1; - } @Override public final int getEndColumn() { return bufferStart + bufferPosition; diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java index c86f69153da3..83fd44775f41 100644 --- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java @@ -91,8 +91,6 @@ public abstract class AbstractSpatialFieldType extend private final Cache fieldStrategyCache = CacheBuilder.newBuilder().build(); protected DistanceUnits distanceUnits; - @Deprecated - protected String units; // for back compat; hopefully null protected final Set supportedScoreModes; @@ -120,30 +118,11 @@ protected void init(IndexSchema schema, Map args) { ctx = SpatialContextFactory.makeSpatialContext(argsWrap, schema.getResourceLoader().getClassLoader()); args.keySet().removeAll(argsWrap.getSeenKeys()); } - - final String unitsErrMsg = "units parameter is deprecated, please use distanceUnits instead for field types with class " + - getClass().getSimpleName(); - this.units = 
args.remove("units");//deprecated - if (units != null) { - if ("degrees".equals(units)) { - log.warn(unitsErrMsg); - } else { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, unitsErrMsg); - } - } final String distanceUnitsStr = args.remove("distanceUnits"); if (distanceUnitsStr == null) { - if (units != null) { - this.distanceUnits = DistanceUnits.BACKCOMPAT; - } else { - this.distanceUnits = ctx.isGeo() ? DistanceUnits.KILOMETERS : DistanceUnits.DEGREES; - } + this.distanceUnits = ctx.isGeo() ? DistanceUnits.KILOMETERS : DistanceUnits.DEGREES; } else { - // If both units and distanceUnits was specified - if (units != null) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, unitsErrMsg); - } this.distanceUnits = parseDistanceUnits(distanceUnitsStr); if (this.distanceUnits == null) throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, diff --git a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java index 55fcf73d868c..fe4cedc04c58 100644 --- a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java +++ b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java @@ -68,7 +68,6 @@ protected void init(IndexSchema schema, Map args) { rptFieldType.argsParser = argsParser = newSpatialArgsParser(); this.ctx = rptFieldType.ctx; this.distanceUnits = rptFieldType.distanceUnits; - this.units = rptFieldType.units; } @Override diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java index b369e999a898..251eb8544d3a 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java @@ -117,23 +117,8 @@ public Object toObject(IndexableField f) { if (val != null) { return (type == TrieTypes.DATE) ? 
new Date(val.longValue()) : val; } else { - // the following code is "deprecated" and only to support pre-3.2 indexes using the old BinaryField encoding: - final BytesRef bytes = f.binaryValue(); - if (bytes==null) return badFieldString(f); - switch (type) { - case INTEGER: - return toInt(bytes.bytes, bytes.offset); - case FLOAT: - return Float.intBitsToFloat(toInt(bytes.bytes, bytes.offset)); - case LONG: - return toLong(bytes.bytes, bytes.offset); - case DOUBLE: - return Double.longBitsToDouble(toLong(bytes.bytes, bytes.offset)); - case DATE: - return new Date(toLong(bytes.bytes, bytes.offset)); - default: - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + f.name()); - } + // the old BinaryField encoding is no longer supported + return badFieldString(f); } } @@ -432,18 +417,6 @@ public Query getFieldQuery(QParser parser, SchemaField field, String externalVal } } - @Deprecated - static int toInt(byte[] arr, int offset) { - return (arr[offset]<<24) | ((arr[offset+1]&0xff)<<16) | ((arr[offset+2]&0xff)<<8) | (arr[offset+3]&0xff); - } - - @Deprecated - static long toLong(byte[] arr, int offset) { - int high = (arr[offset]<<24) | ((arr[offset+1]&0xff)<<16) | ((arr[offset+2]&0xff)<<8) | (arr[offset+3]&0xff); - int low = (arr[offset+4]<<24) | ((arr[offset+5]&0xff)<<16) | ((arr[offset+6]&0xff)<<8) | (arr[offset+7]&0xff); - return (((long)high)<<32) | (low&0x0ffffffffL); - } - @Override public String storedToReadable(IndexableField f) { return toExternal(f); @@ -596,39 +569,8 @@ private void storedToIndexed(IndexableField f, final BytesRefBuilder bytes) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + f.name()); } } else { - // the following code is "deprecated" and only to support pre-3.2 indexes using the old BinaryField encoding: - final BytesRef bytesRef = f.binaryValue(); - if (bytesRef==null) - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid field 
contents: "+f.name()); - switch (type) { - case INTEGER: - LegacyNumericUtils.intToPrefixCoded(toInt(bytesRef.bytes, bytesRef.offset), 0, bytes); - break; - case FLOAT: { - // WARNING: Code Duplication! Keep in sync with o.a.l.util.LegacyNumericUtils! - // copied from LegacyNumericUtils to not convert to/from float two times - // code in next 2 lines is identical to: int v = LegacyNumericUtils.floatToSortableInt(Float.intBitsToFloat(toInt(arr))); - int v = toInt(bytesRef.bytes, bytesRef.offset); - if (v<0) v ^= 0x7fffffff; - LegacyNumericUtils.intToPrefixCoded(v, 0, bytes); - break; - } - case LONG: //fallthrough! - case DATE: - LegacyNumericUtils.longToPrefixCoded(toLong(bytesRef.bytes, bytesRef.offset), 0, bytes); - break; - case DOUBLE: { - // WARNING: Code Duplication! Keep in sync with o.a.l.util.LegacyNumericUtils! - // copied from LegacyNumericUtils to not convert to/from double two times - // code in next 2 lines is identical to: long v = LegacyNumericUtils.doubleToSortableLong(Double.longBitsToDouble(toLong(arr))); - long v = toLong(bytesRef.bytes, bytesRef.offset); - if (v<0) v ^= 0x7fffffffffffffffL; - LegacyNumericUtils.longToPrefixCoded(v, 0, bytes); - break; - } - default: - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + f.name()); - } + // the old BinaryField encoding is no longer supported + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid field contents: "+f.name()); } } diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index da0c7bf5e665..833230977ccf 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -2392,11 +2392,6 @@ public long getOpenNanoTime() { return openNanoTime; } - @Deprecated - public long getOpenTime() { - return openTime.getTime(); - } - @Override public Explanation 
explain(Query query, int doc) throws IOException { return super.explain(QueryUtils.makeQueryable(query), doc); diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java index f7c05c3d30bb..1f4ea34ff955 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java @@ -135,8 +135,6 @@ public ValueSource parse(FunctionQParser fp) throws SyntaxError { SpatialStrategy strategy = ((SpatialStrategyMultiValueSource) mv2).strategy; DistanceUnits distanceUnits = ((SpatialStrategyMultiValueSource) mv2).distanceUnits; Point queryPoint = strategy.getSpatialContext().makePoint(constants[1], constants[0]); - if (distanceUnits == DistanceUnits.BACKCOMPAT) - distanceUnits = DistanceUnits.KILOMETERS; return strategy.makeDistanceValueSource(queryPoint, distanceUnits.multiplierFromDegreesToThisUnit()); } diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java index 5e805ca6821a..a44b8f87b766 100644 --- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java +++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java @@ -60,11 +60,6 @@ public class UpdateShardHandler { private final UpdateShardHandlerConfig cfg; - @Deprecated - public UpdateShardHandler(NodeConfig cfg) { - this(cfg.getUpdateShardHandlerConfig()); - } - public UpdateShardHandler(UpdateShardHandlerConfig cfg) { this.cfg = cfg; clientConnectionManager = new PoolingClientConnectionManager(SchemeRegistryFactory.createSystemDefault()); diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java index 
ca2e5ce6340f..4629425aa822 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java @@ -220,12 +220,6 @@ public UpdateRequestProcessor createProcessor(SolrQueryRequest req, return last; } - - @Deprecated - public UpdateRequestProcessorFactory[] getFactories() { - return chain.toArray(new UpdateRequestProcessorFactory[0]); - } - /** * Returns the underlying array of factories used in this chain. * Modifications to the array will affect future calls to diff --git a/solr/core/src/java/org/apache/solr/util/DistanceUnits.java b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java index 7785bc9466c5..63f43e4652fb 100644 --- a/solr/core/src/java/org/apache/solr/util/DistanceUnits.java +++ b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java @@ -41,10 +41,6 @@ public class DistanceUnits { DistanceUtils.MILES_TO_KM * DistanceUtils.KM_TO_DEG); public final static DistanceUnits DEGREES = new DistanceUnits(DEGREES_PARAM, 180.0/Math.PI, 1.0); - // Previously, distance based filtering was done with km, but scores were based on degrees - @Deprecated - public final static DistanceUnits BACKCOMPAT = new DistanceUnits("backcompat", DistanceUtils.EARTH_MEAN_RADIUS_KM, 1.0); - //volatile so other threads see when we replace when copy-on-write private static volatile Map instances = ImmutableMap.of( KILOMETERS_PARAM, KILOMETERS, diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml index fc8f7c11dd1f..803d45eed770 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml @@ -718,7 +718,7 @@ http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4 --> + geo="true" distErrPct="0.025" maxDistErr="0.000009" /> '.esc() + "\n" + - ''.esc() + - '' - ); }; */ diff --git 
a/solr/webapp/web/js/scripts/app.js b/solr/webapp/web/js/scripts/app.js index 32382b07ec5c..a967cfe307a1 100644 --- a/solr/webapp/web/js/scripts/app.js +++ b/solr/webapp/web/js/scripts/app.js @@ -479,17 +479,6 @@ var solr_admin = function( app_config ) }, error : function() { - show_global_error - ( - '

Unable to load environment info from ' + system_url.esc() + '.

' + - '

This interface requires that you activate the admin request handlers in all SolrCores by adding the ' + - 'following configuration to your solrconfig.xml:

' + "\n" + - - '
' +
-                  ''.esc() + "\n" +
-                  ''.esc() +
-                  '
' - ); }, complete : function() { From 3cbc48ed3085ed2bac5096d6828a7f129003619d Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 2 Mar 2016 11:25:27 -0500 Subject: [PATCH 0002/1113] LUCENE-7059: always visit 1D points in sorted order; fix tie-break but in BKDWriter; fix BKDWriter to pass on maxMBSortInHeap to the OfflineSorter too --- .../org/apache/lucene/index/CheckIndex.java | 23 +- .../org/apache/lucene/index/PointValues.java | 4 +- .../lucene/index/PointValuesWriter.java | 1 + .../index/SlowCompositeReaderWrapper.java | 2 +- .../apache/lucene/search/PointInSetQuery.java | 13 - .../org/apache/lucene/util/OfflineSorter.java | 2 +- .../org/apache/lucene/util/bkd/BKDWriter.java | 41 ++- .../lucene/index/TestDuelingCodecs.java | 4 +- .../apache/lucene/index/TestPointValues.java | 62 +++++ .../lucene/search/TestPointQueries.java | 37 +-- .../facet/range/TestRangeFacetCounts.java | 8 +- .../apache/lucene/index/SorterTestBase.java | 6 +- .../lucene/document/TestBigIntegerPoint.java | 4 +- .../lucene/document/TestInetAddressPoint.java | 4 +- .../lucene/document/TestLatLonPoint.java | 2 +- .../TestLatLonPointDistanceQuery.java | 2 +- .../search/TestDocValuesRangeQuery.java | 4 +- .../asserting/AssertingPointFormat.java | 86 +++++- .../lucene/index/BasePointFormatTestCase.java | 262 ++++++++++-------- .../apache/lucene/util/LuceneTestCase.java | 111 ++++---- 20 files changed, 440 insertions(+), 238 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index 0c244617e525..9f711df4120c 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -1706,8 +1706,12 @@ public static Status.PointsStatus testPoints(CodecReader reader, PrintStream inf lastMaxPacked.length = bytesPerDim; lastMinPacked.length = bytesPerDim; scratch.length = bytesPerDim; + byte[] lastPackedValue = new 
byte[dimCount*bytesPerDim]; values.intersect(fieldInfo.name, new PointValues.IntersectVisitor() { + + private int lastDocID = -1; + @Override public void visit(int docID) { throw new RuntimeException("codec called IntersectVisitor.visit without a packed value for docID=" + docID); @@ -1725,12 +1729,27 @@ public void visit(int docID, byte[] packedValue) { if (scratch.compareTo(lastMinPacked) < 0) { // This doc's point, in this dimension, is lower than the minimum value of the last cell checked: - throw new RuntimeException("packed value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim); + throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim); } if (scratch.compareTo(lastMaxPacked) > 0) { // This doc's point, in this dimension, is greater than the maximum value of the last cell checked: - throw new RuntimeException("packed value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim); + throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " for docID=" + docID + " is out-of-bounds of the last cell min=" + Arrays.toString(lastMinPackedValue) + " max=" + Arrays.toString(lastMaxPackedValue) + " dim=" + dim); + } + } + + // In the 1D case, PointValues must make a single in-order sweep through all values, and tie-break by + // increasing docID: + if (dimCount == 1) { + int cmp = StringHelper.compare(bytesPerDim, lastPackedValue, 0, packedValue, 0); + if (cmp > 0) { + throw new RuntimeException("packed points value " + Arrays.toString(packedValue) + " 
for docID=" + docID + " is out-of-order vs the previous document's value " + Arrays.toString(lastPackedValue)); + } else if (cmp == 0) { + if (docID < lastDocID) { + throw new RuntimeException("packed points value is the same, but docID=" + docID + " is out of order vs previous docID=" + lastDocID); + } } + System.arraycopy(packedValue, 0, lastPackedValue, 0, bytesPerDim); + lastDocID = docID; } status.totalValuePoints++; diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValues.java b/lucene/core/src/java/org/apache/lucene/index/PointValues.java index 108dde31050a..34d315fdde5a 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PointValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValues.java @@ -61,7 +61,9 @@ public interface IntersectVisitor { void visit(int docID) throws IOException; /** Called for all documents in a leaf cell that crosses the query. The consumer - * should scrutinize the packedValue to decide whether to accept it. */ + * should scrutinize the packedValue to decide whether to accept it. In the 1D case, + * values are visited in increasing order, and in the case of ties, in increasing + * docID order. */ void visit(int docID, byte[] packedValue) throws IOException; /** Called for non-leaf cells to test how the cell relates to the query, to diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java index 283f7bdbea0f..2fa8b4fa2b15 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java @@ -43,6 +43,7 @@ public PointValuesWriter(DocumentsWriterPerThread docWriter, FieldInfo fieldInfo packedValue = new byte[fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()]; } + // TODO: if exactly the same value is added to exactly the same doc, should we dedup? 
public void addPackedValue(int docID, BytesRef value) { if (value == null) { throw new IllegalArgumentException("field=" + fieldInfo.name + ": point value cannot be null"); diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java index db9579cc13d4..e44c53cb54ea 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java +++ b/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java @@ -236,7 +236,7 @@ public Bits getLiveDocs() { @Override public PointValues getPointValues() { ensureOpen(); - return MultiPointValues.get(in); + return null; } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java index 42f5bb19ff0b..3d6086c90858 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java @@ -172,10 +172,6 @@ public MergePointVisitor(PrefixCodedTerms sortedPackedPoints, DocIdSetBuilder re this.sortedPackedPoints = sortedPackedPoints; lastMaxPackedValue = new byte[bytesPerDim]; scratch.length = bytesPerDim; - resetIterator(); - } - - private void resetIterator() { this.iterator = sortedPackedPoints.iterator(); nextQueryPoint = iterator.next(); } @@ -211,15 +207,6 @@ public void visit(int docID, byte[] packedValue) { @Override public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - - // NOTE: this is messy ... we need it in cases where a single vistor (us) is shared across multiple leaf readers - // (e.g. SlowCompositeReaderWrapper), in which case we need to reset our iterator to re-start the merge sort. Maybe we should instead - // add an explicit .start() to IntersectVisitor, and clarify the semantics that in the 1D case all cells will be visited in order? 
- if (StringHelper.compare(bytesPerDim, lastMaxPackedValue, 0, minPackedValue, 0) > 0) { - resetIterator(); - } - System.arraycopy(maxPackedValue, 0, lastMaxPackedValue, 0, bytesPerDim); - while (nextQueryPoint != null) { scratch.bytes = minPackedValue; int cmpMin = nextQueryPoint.compareTo(scratch); diff --git a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java index b14610225f70..283dc1f90cb6 100644 --- a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java +++ b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java @@ -120,7 +120,7 @@ public static BufferSize automatic() { final long minBufferSizeBytes = MIN_BUFFER_SIZE_MB*MB; if (sortBufferByteSize < minBufferSizeBytes || totalAvailableBytes > 10 * minBufferSizeBytes) { // lets see if we need/should to grow the heap - if (totalAvailableBytes/2 > minBufferSizeBytes){ // there is enough mem for a reasonable buffer + if (totalAvailableBytes/2 > minBufferSizeBytes) { // there is enough mem for a reasonable buffer sortBufferByteSize = totalAvailableBytes/2; // grow the heap } else { //heap seems smallish lets be conservative fall back to the free/2 diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java index 4397f2e1ab1c..9d6ad978878c 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java @@ -102,6 +102,7 @@ public class BKDWriter implements Closeable { final TrackingDirectoryWrapper tempDir; final String tempFileNamePrefix; + final double maxMBSortInHeap; final byte[] scratchDiff; final byte[] scratchPackedValue; @@ -169,6 +170,8 @@ public BKDWriter(Directory tempDir, String tempFileNamePrefix, int numDims, int // We write first maxPointsSortInHeap in heap, then cutover to offline for additional points: heapPointWriter = new HeapPointWriter(16, 
maxPointsSortInHeap, packedBytesLength); + + this.maxMBSortInHeap = maxMBSortInHeap; } public static void verifyParams(int numDims, int maxPointsInLeafNode, double maxMBSortInHeap) { @@ -550,7 +553,7 @@ private void sortHeapPointWriter(final HeapPointWriter writer, int start, int le //int[] swapCount = new int[1]; //int[] cmpCount = new int[1]; - //System.out.println("SORT length=" + length); + // System.out.println("SORT length=" + length); // All buffered points are still in heap; just do in-place sort: new IntroSorter() { @@ -623,13 +626,11 @@ protected int compare(int i, int j) { return cmp; } - // Tie-break - cmp = Integer.compare(writer.docIDs[i], writer.docIDs[j]); - if (cmp != 0) { - return cmp; - } + // Tie-break by docID: - return Long.compare(writer.ords[i], writer.ords[j]); + // No need to tie break on ord, for the case where the same doc has the same value in a given dimension indexed more than once: it + // can't matter at search time since we don't write ords into the index: + return Integer.compare(writer.docIDs[i], writer.docIDs[j]); } }.sort(start, start+length); //System.out.println("LEN=" + length + " SWAP=" + swapCount[0] + " CMP=" + cmpCount[0]); @@ -679,29 +680,23 @@ public int compare(BytesRef a, BytesRef b) { return cmp; } - // Tie-break by docID and then ord: - reader.reset(a.bytes, a.offset + packedBytesLength, a.length); - final int docIDA = reader.readVInt(); - final long ordA = reader.readVLong(); + // Tie-break by docID: + reader.reset(a.bytes, a.offset + packedBytesLength + Long.BYTES, a.length); + final int docIDA = reader.readInt(); - reader.reset(b.bytes, b.offset + packedBytesLength, b.length); - final int docIDB = reader.readVInt(); - final long ordB = reader.readVLong(); - - cmp = Integer.compare(docIDA, docIDB); - if (cmp != 0) { - return cmp; - } + reader.reset(b.bytes, b.offset + packedBytesLength + Long.BYTES, b.length); + final int docIDB = reader.readInt(); - // TODO: is this really necessary? 
If OfflineSorter is stable, we can safely return 0 here, and avoid writing ords? - return Long.compare(ordA, ordB); + // No need to tie break on ord, for the case where the same doc has the same value in a given dimension indexed more than once: it + // can't matter at search time since we don't write ords into the index: + return Integer.compare(docIDA, docIDB); } }; // TODO: this is sort of sneaky way to get the final OfflinePointWriter from OfflineSorter: IndexOutput[] lastWriter = new IndexOutput[1]; - OfflineSorter sorter = new OfflineSorter(tempDir, tempFileNamePrefix, cmp) { + OfflineSorter sorter = new OfflineSorter(tempDir, tempFileNamePrefix, cmp, OfflineSorter.BufferSize.megabytes(Math.max(1, (long) maxMBSortInHeap)), OfflineSorter.MAX_TEMPFILES) { /** We write/read fixed-byte-width file that {@link OfflinePointReader} can read. */ @Override @@ -753,7 +748,7 @@ private void checkMaxLeafNodeCount(int numLeaves) { /** Writes the BKD tree to the provided {@link IndexOutput} and returns the file offset where index was written. */ public long finish(IndexOutput out) throws IOException { - //System.out.println("\nBKDTreeWriter.finish pointCount=" + pointCount + " out=" + out + " heapWriter=" + heapWriter); + // System.out.println("\nBKDTreeWriter.finish pointCount=" + pointCount + " out=" + out + " heapWriter=" + heapPointWriter); // TODO: specialize the 1D case? it's much faster at indexing time (no partitioning on recruse...) 
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java index 62fe28aedb24..b79e638ba078 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java @@ -162,8 +162,8 @@ public void testCrazyReaderEquals() throws IOException { createRandomIndex(numdocs, leftWriter, seed); createRandomIndex(numdocs, rightWriter, seed); - leftReader = wrapReader(leftWriter.getReader()); - rightReader = wrapReader(rightWriter.getReader()); + leftReader = wrapReader(leftWriter.getReader(), false); + rightReader = wrapReader(rightWriter.getReader(), false); // check that our readers are valid TestUtil.checkReader(leftReader); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index 3e1fe2d1ac83..7551d3c3f2d5 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -17,8 +17,16 @@ package org.apache.lucene.index; +import java.io.IOException; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; +import org.apache.lucene.codecs.lucene60.Lucene60PointReader; +import org.apache.lucene.codecs.lucene60.Lucene60PointWriter; import org.apache.lucene.document.BinaryPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; @@ -26,6 +34,9 @@ import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import 
org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.PointValues; import org.apache.lucene.store.Directory; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; @@ -478,4 +489,55 @@ public void testInvalidDoublePointUsage() throws Exception { field.numericValue(); }); } + + public void testTieBreakByDocID() throws Exception { + Directory dir = newFSDirectory(createTempDir()); + IndexWriterConfig iwc = newIndexWriterConfig(); + IndexWriter w = new IndexWriter(dir, iwc); + Document doc = new Document(); + doc.add(new IntPoint("int", 17)); + for(int i=0;i<300000;i++) { + w.addDocument(doc); + if (false && random().nextInt(1000) == 17) { + w.commit(); + } + } + + IndexReader r = DirectoryReader.open(w); + + for(LeafReaderContext ctx : r.leaves()) { + PointValues points = ctx.reader().getPointValues(); + points.intersect("int", + new IntersectVisitor() { + + int lastDocID = -1; + + @Override + public void visit(int docID) { + if (docID < lastDocID) { + fail("docs out of order: docID=" + docID + " but lastDocID=" + lastDocID); + } + lastDocID = docID; + } + + @Override + public void visit(int docID, byte[] packedValue) { + visit(docID); + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + if (random().nextBoolean()) { + return Relation.CELL_CROSSES_QUERY; + } else { + return Relation.CELL_INSIDE_QUERY; + } + } + }); + } + + r.close(); + w.close(); + dir.close(); + } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java index 8a578fa5779f..fb3179298fb2 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java @@ -53,11 +53,13 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiDocValues; +import 
org.apache.lucene.index.MultiReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -360,7 +362,7 @@ private static void verifyLongs(long[] values, int[] ids) throws Exception { final IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); int numThreads = TestUtil.nextInt(random(), 2, 5); @@ -627,7 +629,7 @@ private void verifyBinary(byte[][][] docValues, int[] ids, int numBytesPerDim) t final IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); int numThreads = TestUtil.nextInt(random(), 2, 5); @@ -847,7 +849,7 @@ public void testMinMaxLong() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, 0L, true))); assertEquals(1, s.count(LongPoint.newRangeQuery("value", 0L, true, Long.MAX_VALUE, true))); @@ -885,7 +887,7 @@ public void testBasicSortedSet() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("aaa"), @@ -950,7 +952,7 @@ public void testLongMinMaxNumeric() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true))); assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, 
Long.MAX_VALUE, false))); @@ -976,7 +978,6 @@ public void testLongMinMaxSortedSet() throws Exception { IndexReader r = w.getReader(); - // We can't wrap with "exotic" readers because the query must see the RangeTreeDVFormat: IndexSearcher s = newSearcher(r, false); assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true))); @@ -1003,7 +1004,7 @@ public void testSortedSetNoOrdsMatch() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", toUTF8("m"), true, toUTF8("n"), false))); assertEquals(2, s.count(BinaryPoint.newRangeQuery("value", (byte[]) null, true, null, true))); @@ -1040,7 +1041,7 @@ public void testNoDocs() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(LongPoint.newRangeQuery("value", 17L, true, 13L, false))); IOUtils.close(r, w, dir); @@ -1207,7 +1208,7 @@ public void testExactPoints() throws Exception { w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(1, s.count(IntPoint.newExactQuery("int", 42))); assertEquals(0, s.count(IntPoint.newExactQuery("int", 41))); @@ -1328,7 +1329,7 @@ public void testRandomPointInSetQuery() throws Exception { final IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); int numThreads = TestUtil.nextInt(random(), 2, 5); @@ -1467,7 +1468,7 @@ public void testBasicMultiDimPointInSetQuery() throws Exception { doc.add(new IntPoint("int", 17, 42)); w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(newMultiDimIntSetQuery("int", 2, 17, 41))); assertEquals(1, 
s.count(newMultiDimIntSetQuery("int", 2, 17, 42))); @@ -1490,7 +1491,7 @@ public void testBasicMultiValueMultiDimPointInSetQuery() throws Exception { doc.add(new IntPoint("int", 34, 79)); w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(newMultiDimIntSetQuery("int", 2, 17, 41))); assertEquals(1, s.count(newMultiDimIntSetQuery("int", 2, 17, 42))); @@ -1523,7 +1524,7 @@ public void testManyEqualValuesMultiDimPointInSetQuery() throws Exception { w.addDocument(doc); } IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(zeroCount, s.count(newMultiDimIntSetQuery("int", 2, 0, 0))); assertEquals(10000-zeroCount, s.count(newMultiDimIntSetQuery("int", 2, 1, 1))); @@ -1573,7 +1574,7 @@ public void testBasicPointInSetQuery() throws Exception { w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(IntPoint.newSetQuery("int", 16))); assertEquals(1, s.count(IntPoint.newSetQuery("int", 17))); assertEquals(3, s.count(IntPoint.newSetQuery("int", 17, 97, 42))); @@ -1634,7 +1635,7 @@ public void testBasicMultiValuedPointInSetQuery() throws Exception { w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(IntPoint.newSetQuery("int", 16))); assertEquals(1, s.count(IntPoint.newSetQuery("int", 17))); assertEquals(1, s.count(IntPoint.newSetQuery("int", 17, 97, 42))); @@ -1685,7 +1686,7 @@ public void testEmptyPointInSetQuery() throws Exception { w.addDocument(doc); IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(0, s.count(IntPoint.newSetQuery("int"))); assertEquals(0, 
s.count(LongPoint.newSetQuery("long"))); assertEquals(0, s.count(FloatPoint.newSetQuery("float"))); @@ -1719,7 +1720,7 @@ public void testPointInSetQueryManyEqualValues() throws Exception { } IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0))); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0, -7))); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 7, 0))); @@ -1777,7 +1778,7 @@ public void testPointInSetQueryManyEqualValuesWithBigGap() throws Exception { } IndexReader r = DirectoryReader.open(w); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0))); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0, -7))); assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 7, 0))); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java index f7a1970ca422..c4233c04710b 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java @@ -218,7 +218,7 @@ public void testMixedRangeAndNonRangeTaxonomy() throws Exception { final TaxonomyReader tr = new DirectoryTaxonomyReader(tw); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); if (VERBOSE) { System.out.println("TEST: searcher=" + s); @@ -375,7 +375,7 @@ public void testRandomLongs() throws Exception { } IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); FacetsConfig config = new FacetsConfig(); int numIters = atLeast(10); @@ -516,7 +516,7 @@ public void testRandomFloats() throws Exception { } IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + 
IndexSearcher s = newSearcher(r, false); FacetsConfig config = new FacetsConfig(); int numIters = atLeast(10); @@ -671,7 +671,7 @@ public void testRandomDoubles() throws Exception { } IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); FacetsConfig config = new FacetsConfig(); int numIters = atLeast(10); diff --git a/lucene/misc/src/test/org/apache/lucene/index/SorterTestBase.java b/lucene/misc/src/test/org/apache/lucene/index/SorterTestBase.java index 93caa7016b86..0015c6c629fa 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/SorterTestBase.java +++ b/lucene/misc/src/test/org/apache/lucene/index/SorterTestBase.java @@ -174,7 +174,8 @@ private static Document doc(final int id, PositionsTokenStream positions) { doc.add(new Field(TERM_VECTORS_FIELD, Integer.toString(id), TERM_VECTORS_TYPE)); byte[] bytes = new byte[4]; NumericUtils.intToBytes(id, bytes, 0); - doc.add(new BinaryPoint(DIMENSIONAL_FIELD, bytes)); + // TODO: index time sorting doesn't yet support points + //doc.add(new BinaryPoint(DIMENSIONAL_FIELD, bytes)); return doc; } @@ -378,6 +379,8 @@ public void testTermVectors() throws Exception { } } + // TODO: index sorting doesn't yet support points + /* public void testPoints() throws Exception { PointValues values = sortedReader.getPointValues(); values.intersect(DIMENSIONAL_FIELD, @@ -398,4 +401,5 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { } }); } + */ } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java index 3e8cf3db011c..f6d407de6d7b 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java @@ -40,7 +40,7 @@ public void testBasics() throws Exception { // search and verify we found our doc IndexReader reader = writer.getReader(); - 
IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(BigIntegerPoint.newExactQuery("field", large))); assertEquals(1, searcher.count(BigIntegerPoint.newRangeQuery("field", large.subtract(BigInteger.ONE), false, large.add(BigInteger.ONE), false))); assertEquals(1, searcher.count(BigIntegerPoint.newSetQuery("field", large))); @@ -65,7 +65,7 @@ public void testNegative() throws Exception { // search and verify we found our doc IndexReader reader = writer.getReader(); - IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(BigIntegerPoint.newExactQuery("field", negative))); assertEquals(1, searcher.count(BigIntegerPoint.newRangeQuery("field", negative.subtract(BigInteger.ONE), false, negative.add(BigInteger.ONE), false))); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java index c9be31fc3126..9854001a2d02 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java @@ -40,7 +40,7 @@ public void testBasics() throws Exception { // search and verify we found our doc IndexReader reader = writer.getReader(); - IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(InetAddressPoint.newExactQuery("field", address))); assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 24))); assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("1.2.3.3"), false, InetAddress.getByName("1.2.3.5"), false))); @@ -66,7 +66,7 @@ public void testBasicsV6() throws Exception { // search and verify we found our doc IndexReader reader = writer.getReader(); - IndexSearcher searcher 
= newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(InetAddressPoint.newExactQuery("field", address))); assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 64))); assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("fec0::f66c"), false, InetAddress.getByName("fec0::f66e"), false))); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java index b67dec83ccfe..1d3bfac30525 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java @@ -38,7 +38,7 @@ public void testBoxQuery() throws Exception { // search and verify we found our doc IndexReader reader = writer.getReader(); - IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(LatLonPoint.newBoxQuery("field", 18, 19, -66, -65))); reader.close(); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointDistanceQuery.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointDistanceQuery.java index 3d47b4464c88..e37d75eb441d 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointDistanceQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointDistanceQuery.java @@ -55,7 +55,7 @@ public void testBasics() throws Exception { // search within 50km and verify we found our doc IndexReader reader = writer.getReader(); - IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(LatLonPoint.newDistanceQuery("field", 18, -65, 50_000))); reader.close(); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java 
b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java index 1bcadd6b9300..47cd740d1d3c 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java @@ -60,7 +60,7 @@ public void testDuelNumericRangeQuery() throws IOException { } iw.commit(); final IndexReader reader = iw.getReader(); - final IndexSearcher searcher = newSearcher(reader); + final IndexSearcher searcher = newSearcher(reader, false); iw.close(); for (int i = 0; i < 100; ++i) { @@ -188,7 +188,7 @@ public void testApproximation() throws IOException { } iw.commit(); final IndexReader reader = iw.getReader(); - final IndexSearcher searcher = newSearcher(reader); + final IndexSearcher searcher = newSearcher(reader, false); iw.close(); for (int i = 0; i < 100; ++i) { diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java index 3411c7b4b162..15836de6ec0f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java @@ -17,6 +17,7 @@ package org.apache.lucene.codecs.asserting; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import org.apache.lucene.codecs.PointFormat; @@ -24,9 +25,13 @@ import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.MergeState; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.StringHelper; 
import org.apache.lucene.util.TestUtil; /** @@ -60,6 +65,83 @@ public PointWriter fieldsWriter(SegmentWriteState state) throws IOException { public PointReader fieldsReader(SegmentReadState state) throws IOException { return new AssertingPointReader(in.fieldsReader(state)); } + + /** Validates in the 1D case that all points are visited in order, and point values are in bounds of the last cell checked */ + static class AssertingIntersectVisitor implements IntersectVisitor { + final IntersectVisitor in; + final int numDims; + final int bytesPerDim; + final byte[] lastDocValue; + final byte[] lastMinPackedValue; + final byte[] lastMaxPackedValue; + private Relation lastCompareResult; + private int lastDocID = -1; + + public AssertingIntersectVisitor(int numDims, int bytesPerDim, IntersectVisitor in) { + this.in = in; + this.numDims = numDims; + this.bytesPerDim = bytesPerDim; + lastMaxPackedValue = new byte[numDims*bytesPerDim]; + lastMinPackedValue = new byte[numDims*bytesPerDim]; + if (numDims == 1) { + lastDocValue = new byte[bytesPerDim]; + } else { + lastDocValue = null; + } + } + + @Override + public void visit(int docID) throws IOException { + // This method, not filtering each hit, should only be invoked when the cell is inside the query shape: + assert lastCompareResult == Relation.CELL_INSIDE_QUERY; + in.visit(docID); + } + + @Override + public void visit(int docID, byte[] packedValue) throws IOException { + + // This method, to filter each doc's value, should only be invoked when the cell crosses the query shape: + assert lastCompareResult == PointValues.Relation.CELL_CROSSES_QUERY; + + // This doc's packed value should be contained in the last cell passed to compare: + for(int dim=0;dim= 0: "dim=" + dim + " of " + numDims; + } + + // TODO: we should assert that this "matches" whatever relation the last call to compare had returned + assert packedValue.length == numDims * bytesPerDim; + if (numDims == 1) { + int cmp = StringHelper.compare(bytesPerDim, 
lastDocValue, 0, packedValue, 0); + if (cmp < 0) { + // ok + } else if (cmp == 0) { + assert lastDocID <= docID: "doc ids are out of order when point values are the same!"; + } else { + // out of order! + assert false: "point values are out of order"; + } + System.arraycopy(packedValue, 0, lastDocValue, 0, bytesPerDim); + } + in.visit(docID, packedValue); + } + + @Override + public void grow(int count) { + in.grow(count); + } + + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + for(int dim=0;dim= 0; - - if (max.compareTo(queryMin[dim]) < 0 || min.compareTo(queryMax[dim]) > 0) { - return Relation.CELL_OUTSIDE_QUERY; - } else if (min.compareTo(queryMin[dim]) < 0 || max.compareTo(queryMax[dim]) > 0) { - crosses = true; + @Override + public void visit(int docID, byte[] packedValue) { + //System.out.println("visit check docID=" + docID); + for(int dim=0;dim 0) { + //System.out.println(" no"); + return; + } } + + //System.out.println(" yes"); + hits.set(docBase+docID); } - if (crosses) { - return Relation.CELL_CROSSES_QUERY; - } else { - return Relation.CELL_INSIDE_QUERY; + @Override + public Relation compare(byte[] minPacked, byte[] maxPacked) { + boolean crosses = false; + for(int dim=0;dim= 0; + + if (max.compareTo(queryMin[dim]) < 0 || min.compareTo(queryMax[dim]) > 0) { + return Relation.CELL_OUTSIDE_QUERY; + } else if (min.compareTo(queryMin[dim]) < 0 || max.compareTo(queryMax[dim]) > 0) { + crosses = true; + } + } + + if (crosses) { + return Relation.CELL_CROSSES_QUERY; + } else { + return Relation.CELL_INSIDE_QUERY; + } } - } - }); + }); + } for(int docID=0;docID 0) { + System.arraycopy(leafMaxValues, dim*numBytesPerDim, maxValues, dim*numBytesPerDim, numBytesPerDim); + } + } + } + byte[] scratch = new byte[numBytesPerDim]; for(int dim=0;dim 0) { - //System.out.println(" query_outside_cell"); - return Relation.CELL_OUTSIDE_QUERY; - } else if (StringHelper.compare(numBytesPerDim, minPacked, dim*numBytesPerDim, queryMin[dim], 0) 
< 0 || - StringHelper.compare(numBytesPerDim, maxPacked, dim*numBytesPerDim, queryMax[dim], 0) > 0) { - crosses = true; + //System.out.println("visit check docID=" + docID + " id=" + idValues.get(docID)); + for(int dim=0;dim 0) { + //System.out.println(" query_outside_cell"); + return Relation.CELL_OUTSIDE_QUERY; + } else if (StringHelper.compare(numBytesPerDim, minPacked, dim*numBytesPerDim, queryMin[dim], 0) < 0 || + StringHelper.compare(numBytesPerDim, maxPacked, dim*numBytesPerDim, queryMax[dim], 0) > 0) { + crosses = true; + } + } + + if (crosses) { + //System.out.println(" query_crosses_cell"); + return Relation.CELL_CROSSES_QUERY; + } else { + //System.out.println(" cell_inside_query"); + return Relation.CELL_INSIDE_QUERY; + } } - } - }); + }); + } BitSet expected = new BitSet(); for(int ord=0;ord allFields = new ArrayList<>(); - for (FieldInfo fi : ar.getFieldInfos()) { - allFields.add(fi.name); - } - Collections.shuffle(allFields, random); - final int end = allFields.isEmpty() ? 0 : random.nextInt(allFields.size()); - final Set fields = new HashSet<>(allFields.subList(0, end)); - // will create no FC insanity as ParallelLeafReader has own cache key: - if (VERBOSE) { - System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader(SlowCompositeReaderWapper)"); + if (allowSlowCompositeReader) { + final LeafReader ar = SlowCompositeReaderWrapper.wrap(r); + final List allFields = new ArrayList<>(); + for (FieldInfo fi : ar.getFieldInfos()) { + allFields.add(fi.name); + } + Collections.shuffle(allFields, random); + final int end = allFields.isEmpty() ? 
0 : random.nextInt(allFields.size()); + final Set fields = new HashSet<>(allFields.subList(0, end)); + // will create no FC insanity as ParallelLeafReader has own cache key: + if (VERBOSE) { + System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader(SlowCompositeReaderWapper)"); + } + r = new ParallelLeafReader( + new FieldFilterLeafReader(ar, fields, false), + new FieldFilterLeafReader(ar, fields, true) + ); } - r = new ParallelLeafReader( - new FieldFilterLeafReader(ar, fields, false), - new FieldFilterLeafReader(ar, fields, true) - ); break; case 4: // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use @@ -1701,7 +1709,9 @@ public static IndexReader wrapReader(IndexReader r) throws IOException { } } if (wasOriginallyAtomic) { - r = SlowCompositeReaderWrapper.wrap(r); + if (allowSlowCompositeReader) { + r = SlowCompositeReaderWrapper.wrap(r); + } } else if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) { // prevent cache insanity caused by e.g. ParallelCompositeReader, to fix we wrap one more time: r = new FCInvisibleMultiReader(r); @@ -2588,40 +2598,45 @@ public void assertFieldInfosEquals(String info, IndexReader leftReader, IndexRea } // naive silly memory heavy uninversion!! 
maps docID -> packed values (a Set because a given doc can be multi-valued) - private Map> uninvert(String fieldName, PointValues points) throws IOException { + private Map> uninvert(String fieldName, IndexReader reader) throws IOException { final Map> docValues = new HashMap<>(); - points.intersect(fieldName, new PointValues.IntersectVisitor() { - @Override - public void visit(int docID) { - throw new UnsupportedOperationException(); - } + for(LeafReaderContext ctx : reader.leaves()) { - @Override - public void visit(int docID, byte[] packedValue) throws IOException { - if (docValues.containsKey(docID) == false) { - docValues.put(docID, new HashSet()); - } - docValues.get(docID).add(new BytesRef(packedValue.clone())); - } + PointValues points = ctx.reader().getPointValues(); + if (points == null) { + continue; + } + + points.intersect(fieldName, + new PointValues.IntersectVisitor() { + @Override + public void visit(int docID) { + throw new UnsupportedOperationException(); + } + + @Override + public void visit(int docID, byte[] packedValue) throws IOException { + int topDocID = ctx.docBase + docID; + if (docValues.containsKey(topDocID) == false) { + docValues.put(topDocID, new HashSet()); + } + docValues.get(topDocID).add(new BytesRef(packedValue.clone())); + } + + @Override + public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + // We pretend our query shape is so hairy that it crosses every single cell: + return PointValues.Relation.CELL_CROSSES_QUERY; + } + }); + } - @Override - public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - // We pretend our query shape is so hairy that it crosses every single cell: - return PointValues.Relation.CELL_CROSSES_QUERY; - } - }); return docValues; } public void assertPointsEquals(String info, IndexReader leftReader, IndexReader rightReader) throws IOException { - assertPointsEquals(info, - MultiFields.getMergedFieldInfos(leftReader), - 
MultiPointValues.get(leftReader), - MultiFields.getMergedFieldInfos(rightReader), - MultiPointValues.get(rightReader)); - } - - public void assertPointsEquals(String info, FieldInfos fieldInfos1, PointValues points1, FieldInfos fieldInfos2, PointValues points2) throws IOException { + FieldInfos fieldInfos1 = MultiFields.getMergedFieldInfos(leftReader); + FieldInfos fieldInfos2 = MultiFields.getMergedFieldInfos(rightReader); for(FieldInfo fieldInfo1 : fieldInfos1) { if (fieldInfo1.getPointDimensionCount() != 0) { FieldInfo fieldInfo2 = fieldInfos2.fieldInfo(fieldInfo1.name); @@ -2631,8 +2646,8 @@ public void assertPointsEquals(String info, FieldInfos fieldInfos1, PointValues assertEquals(info, fieldInfo2.getPointNumBytes(), fieldInfo2.getPointNumBytes()); assertEquals(info + " field=" + fieldInfo1.name, - uninvert(fieldInfo1.name, points1), - uninvert(fieldInfo1.name, points2)); + uninvert(fieldInfo1.name, leftReader), + uninvert(fieldInfo1.name, rightReader)); } } From e1033d965414b34b990070bb87c509364a7f4194 Mon Sep 17 00:00:00 2001 From: thelabdude Date: Wed, 2 Mar 2016 11:22:27 -0700 Subject: [PATCH 0003/1113] SOLR-8145: Fix position of OOM killer script when starting Solr in the background --- solr/bin/solr | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/solr/bin/solr b/solr/bin/solr index 904c2c389e1f..9cf8ae53d767 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -1420,8 +1420,9 @@ function launch_solr() { exec "$JAVA" "${SOLR_START_OPTS[@]}" $SOLR_ADDL_ARGS -jar start.jar "${SOLR_JETTY_CONFIG[@]}" else # run Solr in the background - nohup "$JAVA" "${SOLR_START_OPTS[@]}" $SOLR_ADDL_ARGS -jar start.jar \ - "-XX:OnOutOfMemoryError=$SOLR_TIP/bin/oom_solr.sh $SOLR_PORT $SOLR_LOGS_DIR" "${SOLR_JETTY_CONFIG[@]}" \ + nohup "$JAVA" "${SOLR_START_OPTS[@]}" $SOLR_ADDL_ARGS \ + "-XX:OnOutOfMemoryError=$SOLR_TIP/bin/oom_solr.sh $SOLR_PORT $SOLR_LOGS_DIR" \ + -jar start.jar "${SOLR_JETTY_CONFIG[@]}" \ 1>"$SOLR_LOGS_DIR/solr-$SOLR_PORT-console.log" 
2>&1 & echo $! > "$SOLR_PID_DIR/solr-$SOLR_PORT.pid" # no lsof on cygwin though From ddd019fac0d9eff352a4a17a62d9a9654f7bdc86 Mon Sep 17 00:00:00 2001 From: thelabdude Date: Wed, 2 Mar 2016 11:37:23 -0700 Subject: [PATCH 0004/1113] SOLR-8145: mention fix in solr/CHANGES.txt --- solr/CHANGES.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 3120eeb45e72..eb4195850ca4 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -233,6 +233,9 @@ Bug Fixes * SOLR-8771: Multi-threaded core shutdown creates executor per core. (Mike Drob via Mark Miller) +* SOLR-8145: Fix position of OOM killer script when starting Solr in the background (Jurian Broertjes via + Timothy Potter) + Optimizations ---------------------- * SOLR-7876: Speed up queries and operations that use many terms when timeAllowed has not been From 25cc48bbb8cc11c2f7a2c9da30d675c9ae5926ca Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 2 Mar 2016 17:19:24 -0500 Subject: [PATCH 0005/1113] LUCENE-7059: remove MultiPointValues --- .../apache/lucene/index/MultiPointValues.java | 172 ------------------ 1 file changed, 172 deletions(-) delete mode 100644 lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java deleted file mode 100644 index dcc33da4aedb..000000000000 --- a/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.index; - - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.lucene.util.StringHelper; - -/** Merges multiple {@link PointValues} into a single one. */ -public class MultiPointValues extends PointValues { - - private final List subs; - private final List docBases; - - private MultiPointValues(List subs, List docBases) { - this.subs = subs; - this.docBases = docBases; - } - - /** Returns a {@link PointValues} merging all point values from the provided reader. */ - public static PointValues get(IndexReader r) { - final List leaves = r.leaves(); - final int size = leaves.size(); - if (size == 0) { - return null; - } else if (size == 1) { - return leaves.get(0).reader().getPointValues(); - } - - List values = new ArrayList<>(); - List docBases = new ArrayList<>(); - for (int i = 0; i < size; i++) { - LeafReaderContext context = leaves.get(i); - PointValues v = context.reader().getPointValues(); - if (v != null) { - values.add(v); - docBases.add(context.docBase); - } - } - - if (values.isEmpty()) { - return null; - } - - return new MultiPointValues(values, docBases); - } - - /** Finds all documents and points matching the provided visitor */ - public void intersect(String fieldName, IntersectVisitor visitor) throws IOException { - for(int i=0;i 0) { - b.append(", "); - } - b.append("docBase="); - b.append(docBases.get(i)); - b.append(" sub=" + subs.get(i)); - } - b.append(')'); - return b.toString(); - } - - @Override - public byte[] getMinPackedValue(String fieldName) 
throws IOException { - byte[] result = null; - for(int i=0;i 0) { - System.arraycopy(maxPackedValue, offset, result, offset, bytesPerDim); - } - } - } - } - - return result; - } - - @Override - public int getNumDimensions(String fieldName) throws IOException { - for(int i=0;i Date: Wed, 2 Mar 2016 16:51:16 -0500 Subject: [PATCH 0006/1113] LUCENE-7061: fix remaining api issues with XYZPoint classes Squashed commit of the following: commit 0261e28dd29b1c2a1dcbd5e796966b2cdf2f4b82 Author: Robert Muir Date: Wed Mar 2 14:51:49 2016 -0500 Add note about comparison order and test extreme values commit e1f7bc244cd980e931d584c00ba73f8ac521c3fd Author: Mike McCandless Date: Wed Mar 2 14:25:50 2016 -0500 add explicit test to verify rect query is inclusive; make test fail fast by default commit c9be9139ec2f9553ce05fb56b2667be77b8176b6 Author: Robert Muir Date: Wed Mar 2 14:03:47 2016 -0500 oops commit 351d0838bbc87dc7c6d83476bd9cb7ce6c38fc3c Author: Robert Muir Date: Wed Mar 2 13:53:42 2016 -0500 clean up pointrangequery: remove nulls and inclusives commit 0796057a8041ddf43341611b477502fa2307f0b1 Merge: 742ee02 e3198ca Author: Robert Muir Date: Wed Mar 2 13:26:48 2016 -0500 Merge branch 'master' into unfuck_points commit 742ee02aaf55439463daddbd3ea16c5e8df31f01 Author: Robert Muir Date: Wed Mar 2 13:16:08 2016 -0500 Remove nulls and inclusives from points apis --- .../apache/lucene/document/BinaryPoint.java | 82 ++-- .../apache/lucene/document/DoublePoint.java | 84 ++-- .../apache/lucene/document/FloatPoint.java | 84 ++-- .../org/apache/lucene/document/IntPoint.java | 84 ++-- .../org/apache/lucene/document/LongPoint.java | 84 ++-- .../apache/lucene/search/PointInSetQuery.java | 11 +- .../apache/lucene/search/PointRangeQuery.java | 170 ++------ .../index/TestDemoParallelLeafReader.java | 2 +- .../lucene/search/TestPointQueries.java | 410 +++++++++--------- .../TestUsageTrackingFilterCachingPolicy.java | 2 +- .../demo/facet/DistanceFacetsExample.java | 8 +- 
.../lucene/demo/facet/RangeFacetsExample.java | 3 +- .../lucene/facet/range/DoubleRange.java | 33 +- .../facet/range/DoubleRangeFacetCounts.java | 4 +- .../apache/lucene/facet/range/LongRange.java | 25 +- .../lucene/facet/range/LongRangeCounter.java | 14 +- .../facet/range/TestRangeFacetCounts.java | 20 +- .../search/highlight/HighlighterTest.java | 2 +- .../lucene/document/BigIntegerPoint.java | 97 ++--- .../lucene/document/InetAddressPoint.java | 66 ++- .../apache/lucene/document/LatLonPoint.java | 11 +- .../lucene/document/TestBigIntegerPoint.java | 12 +- .../lucene/document/TestInetAddressPoint.java | 4 +- .../lucene/document/TestLatLonPoint.java | 2 +- .../search/TestDocValuesRangeQuery.java | 26 +- .../lucene/search/TestLatLonPointQueries.java | 8 +- .../spatial/util/BaseGeoPointTestCase.java | 74 +++- .../suggest/document/TestSuggestField.java | 2 +- 28 files changed, 636 insertions(+), 788 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java index b8b53d5b2157..e139a877264a 100644 --- a/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.util.Arrays; import java.util.Comparator; @@ -25,7 +24,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.StringHelper; /** @@ -37,10 +35,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, byte[])} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, byte[][]) newSetQuery(String, byte[]...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, byte[], byte[])} for matching a 1D range. + *
  • {@link #newRangeQuery(String, byte[][], byte[][])} for matching points/ranges in n-dimensional space. *
*/ public final class BinaryPoint extends Field { @@ -133,7 +131,7 @@ public BinaryPoint(String name, byte[] packedPoint, FieldType type) { * Create a query for matching an exact binary value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, byte[][], byte[][])} instead. * * @param field field name. must not be {@code null}. * @param value binary value @@ -141,56 +139,39 @@ public BinaryPoint(String name, byte[] packedPoint, FieldType type) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, byte[] value) { - if (value == null) { - throw new IllegalArgumentException("value cannot be null"); - } - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for binary values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. - *

- * You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. - *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * {@link #newRangeQuery(String, byte[][], byte[][])} instead. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null. + * @param lowerValue lower portion of the range (inclusive). must not be {@code null} + * @param upperValue upper portion of the range (inclusive). must not be {@code null} + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, + * or if {@code upperValue} is null * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, byte[] lowerValue, boolean lowerInclusive, byte[] upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, new byte[][] {lowerValue}, new boolean[] {lowerInclusive}, new byte[][] {upperValue}, new boolean[] {upperInclusive}); + public static Query newRangeQuery(String field, byte[] lowerValue, byte[] upperValue) { + PointRangeQuery.checkArgs(field, lowerValue, upperValue); + return newRangeQuery(field, new byte[][] {lowerValue}, new byte[][] {upperValue}); } /** - * Create a multidimensional range query for binary values. - *

- * You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. - *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Create a range query for n-dimensional binary values. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be null. + * @param upperValue upper portion of the range (inclusive). must not be null. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. */ - public static Query newMultiRangeQuery(String field, byte[][] lowerValue, boolean[] lowerInclusive, byte[][] upperValue, boolean[] upperInclusive) { - PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, lowerValue, lowerInclusive, upperValue, upperInclusive) { + public static Query newRangeQuery(String field, byte[][] lowerValue, byte[][] upperValue) { + return new PointRangeQuery(field, lowerValue, upperValue) { @Override protected String toString(int dimension, byte[] value) { assert value != null; @@ -212,13 +193,13 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. 
This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, byte[]... valuesIn) throws IOException { + public static Query newSetQuery(String field, byte[]... values) { // Make sure all byte[] have the same length int bytesPerDim = -1; - for(byte[] value : valuesIn) { + for(byte[] value : values) { if (bytesPerDim == -1) { bytesPerDim = value.length; } else if (value.length != bytesPerDim) { @@ -232,9 +213,8 @@ public static Query newSetQuery(String field, byte[]... valuesIn) throws IOExcep } // Don't unexpectedly change the user's incoming values array: - byte[][] values = valuesIn.clone(); - - Arrays.sort(values, + byte[][] sortedValues = values.clone(); + Arrays.sort(sortedValues, new Comparator() { @Override public int compare(byte[] a, byte[] b) { @@ -242,21 +222,21 @@ public int compare(byte[] a, byte[] b) { } }); - final BytesRef value = new BytesRef(new byte[bytesPerDim]); + final BytesRef encoded = new BytesRef(new byte[bytesPerDim]); return new PointInSetQuery(field, 1, bytesPerDim, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - value.bytes = values[upto]; + encoded.bytes = sortedValues[upto]; upto++; - return value; + return encoded; } } }) { diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java index 58ec364f58d9..9dbd96e4e3e6 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java @@ -16,14 +16,12 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.util.Arrays; import org.apache.lucene.search.PointInSetQuery; import 
org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; /** @@ -35,10 +33,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, double)} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, double...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, double, double)} for matching a 1D range. + *
  • {@link #newRangeQuery(String, double[], double[])} for matching points/ranges in n-dimensional space. *
*/ public final class DoublePoint extends Field { @@ -126,13 +124,11 @@ public String toString() { } /** Encode n-dimensional double point into binary encoding */ - private static byte[][] encode(Double value[]) { + private static byte[][] encode(double value[]) { byte[][] encoded = new byte[value.length][]; for (int i = 0; i < value.length; i++) { - if (value[i] != null) { - encoded[i] = new byte[Double.BYTES]; - encodeDimension(value[i], encoded[i], 0); - } + encoded[i] = new byte[Double.BYTES]; + encodeDimension(value[i], encoded[i], 0); } return encoded; } @@ -155,7 +151,7 @@ public static double decodeDimension(byte value[], int offset) { * Create a query for matching an exact double value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, double[], double[])} instead. * * @param field field name. must not be {@code null}. * @param value double value @@ -163,60 +159,51 @@ public static double decodeDimension(byte value[], int offset) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, double value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for double values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, double[], double[])} instead. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. + * by setting {@code lowerValue = Double.NEGATIVE_INFINITY} or {@code upperValue = Double.POSITIVE_INFINITY}. *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Range comparisons are consistent with {@link Double#compareTo(Double)}. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. + * @param lowerValue lower portion of the range (inclusive). + * @param upperValue upper portion of the range (inclusive). * @throws IllegalArgumentException if {@code field} is null. * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, Double lowerValue, boolean lowerInclusive, Double upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, - new Double[] { lowerValue }, - new boolean[] { lowerInclusive }, - new Double[] { upperValue }, - new boolean[] { upperInclusive }); + public static Query newRangeQuery(String field, double lowerValue, double upperValue) { + return newRangeQuery(field, new double[] { lowerValue }, new double[] { upperValue }); } /** - * Create a multidimensional range query for double values. + * Create a range query for n-dimensional double values. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. + * by setting {@code lowerValue[i] = Double.NEGATIVE_INFINITY} or {@code upperValue[i] = Double.POSITIVE_INFINITY}. *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Range comparisons are consistent with {@link Double#compareTo(Double)}. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. 
*/ - public static Query newMultiRangeQuery(String field, Double[] lowerValue, boolean lowerInclusive[], Double[] upperValue, boolean upperInclusive[]) { + public static Query newRangeQuery(String field, double[] lowerValue, double[] upperValue) { PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, DoublePoint.encode(lowerValue), lowerInclusive, DoublePoint.encode(upperValue), upperInclusive) { + return new PointRangeQuery(field, encode(lowerValue), encode(upperValue)) { @Override protected String toString(int dimension, byte[] value) { - return Double.toString(DoublePoint.decodeDimension(value, 0)); + return Double.toString(decodeDimension(value, 0)); } }; } @@ -225,30 +212,29 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, double... valuesIn) throws IOException { + public static Query newSetQuery(String field, double... 
values) { // Don't unexpectedly change the user's incoming values array: - double[] values = valuesIn.clone(); - - Arrays.sort(values); + double[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[Double.BYTES]); + final BytesRef encoded = new BytesRef(new byte[Double.BYTES]); return new PointInSetQuery(field, 1, Double.BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - encodeDimension(values[upto], value.bytes, 0); + encodeDimension(sortedValues[upto], encoded.bytes, 0); upto++; - return value; + return encoded; } } }) { diff --git a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java index 7829f80d712f..0b82abe11281 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java @@ -16,14 +16,12 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.util.Arrays; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; /** @@ -35,10 +33,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, float)} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, float...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, float, float)} for matching a 1D range. + *
  • {@link #newRangeQuery(String, float[], float[])} for matching points/ranges in n-dimensional space. *
*/ public final class FloatPoint extends Field { @@ -126,13 +124,11 @@ public String toString() { } /** Encode n-dimensional float values into binary encoding */ - private static byte[][] encode(Float value[]) { + private static byte[][] encode(float value[]) { byte[][] encoded = new byte[value.length][]; for (int i = 0; i < value.length; i++) { - if (value[i] != null) { - encoded[i] = new byte[Float.BYTES]; - encodeDimension(value[i], encoded[i], 0); - } + encoded[i] = new byte[Float.BYTES]; + encodeDimension(value[i], encoded[i], 0); } return encoded; } @@ -155,7 +151,7 @@ public static float decodeDimension(byte value[], int offset) { * Create a query for matching an exact float value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, float[], float[])} instead. * * @param field field name. must not be {@code null}. * @param value float value @@ -163,60 +159,51 @@ public static float decodeDimension(byte value[], int offset) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, float value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for float values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, float[], float[])} instead. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. + * by setting {@code lowerValue = Float.NEGATIVE_INFINITY} or {@code upperValue = Float.POSITIVE_INFINITY}. *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Range comparisons are consistent with {@link Float#compareTo(Float)}. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. + * @param lowerValue lower portion of the range (inclusive). + * @param upperValue upper portion of the range (inclusive). * @throws IllegalArgumentException if {@code field} is null. * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, Float lowerValue, boolean lowerInclusive, Float upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, - new Float[] { lowerValue }, - new boolean[] { lowerInclusive }, - new Float[] { upperValue }, - new boolean[] { upperInclusive }); + public static Query newRangeQuery(String field, float lowerValue, float upperValue) { + return newRangeQuery(field, new float[] { lowerValue }, new float[] { upperValue }); } /** - * Create a multidimensional range query for float values. + * Create a range query for n-dimensional float values. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. + * by setting {@code lowerValue[i] = Float.NEGATIVE_INFINITY} or {@code upperValue[i] = Float.POSITIVE_INFINITY}. *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Range comparisons are consistent with {@link Float#compareTo(Float)}. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. 
*/ - public static Query newMultiRangeQuery(String field, Float[] lowerValue, boolean lowerInclusive[], Float[] upperValue, boolean upperInclusive[]) { + public static Query newRangeQuery(String field, float[] lowerValue, float[] upperValue) { PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, FloatPoint.encode(lowerValue), lowerInclusive, FloatPoint.encode(upperValue), upperInclusive) { + return new PointRangeQuery(field, encode(lowerValue), encode(upperValue)) { @Override protected String toString(int dimension, byte[] value) { - return Float.toString(FloatPoint.decodeDimension(value, 0)); + return Float.toString(decodeDimension(value, 0)); } }; } @@ -225,30 +212,29 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, float... valuesIn) throws IOException { + public static Query newSetQuery(String field, float... 
values) { // Don't unexpectedly change the user's incoming values array: - float[] values = valuesIn.clone(); - - Arrays.sort(values); + float[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[Float.BYTES]); + final BytesRef encoded = new BytesRef(new byte[Float.BYTES]); return new PointInSetQuery(field, 1, Float.BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - encodeDimension(values[upto], value.bytes, 0); + encodeDimension(sortedValues[upto], encoded.bytes, 0); upto++; - return value; + return encoded; } } }) { diff --git a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java index f27df755863e..effcb62945ac 100644 --- a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java @@ -16,14 +16,12 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.util.Arrays; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; /** @@ -35,10 +33,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, int)} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, int...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, int, int)} for matching a 1D range. + *
  • {@link #newRangeQuery(String, int[], int[])} for matching points/ranges in n-dimensional space. *
*/ public final class IntPoint extends Field { @@ -126,13 +124,11 @@ public String toString() { } /** Encode n-dimensional integer values into binary encoding */ - private static byte[][] encode(Integer value[]) { + private static byte[][] encode(int value[]) { byte[][] encoded = new byte[value.length][]; for (int i = 0; i < value.length; i++) { - if (value[i] != null) { - encoded[i] = new byte[Integer.BYTES]; - encodeDimension(value[i], encoded[i], 0); - } + encoded[i] = new byte[Integer.BYTES]; + encodeDimension(value[i], encoded[i], 0); } return encoded; } @@ -155,7 +151,7 @@ public static int decodeDimension(byte value[], int offset) { * Create a query for matching an exact integer value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, int[], int[])} instead. * * @param field field name. must not be {@code null}. * @param value exact value @@ -163,60 +159,51 @@ public static int decodeDimension(byte value[], int offset) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, int value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for integer values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, int[], int[])} instead. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. + * by setting {@code lowerValue = Integer.MIN_VALUE} or {@code upperValue = Integer.MAX_VALUE}. *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue + 1} or {@code upperValue - 1} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. + * @param lowerValue lower portion of the range (inclusive). + * @param upperValue upper portion of the range (inclusive). * @throws IllegalArgumentException if {@code field} is null. * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, Integer lowerValue, boolean lowerInclusive, Integer upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, - new Integer[] { lowerValue }, - new boolean[] { lowerInclusive }, - new Integer[] { upperValue }, - new boolean[] { upperInclusive }); + public static Query newRangeQuery(String field, int lowerValue, int upperValue) { + return newRangeQuery(field, new int[] { lowerValue }, new int[] { upperValue }); } /** - * Create a multidimensional range query for integer values. + * Create a range query for n-dimensional integer values. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. + * by setting {@code lowerValue[i] = Integer.MIN_VALUE} or {@code upperValue[i] = Integer.MAX_VALUE}. *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue[i] + 1} or {@code upperValue[i] - 1} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. 
*/ - public static Query newMultiRangeQuery(String field, Integer[] lowerValue, boolean lowerInclusive[], Integer[] upperValue, boolean upperInclusive[]) { + public static Query newRangeQuery(String field, int[] lowerValue, int[] upperValue) { PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, IntPoint.encode(lowerValue), lowerInclusive, IntPoint.encode(upperValue), upperInclusive) { + return new PointRangeQuery(field, encode(lowerValue), encode(upperValue)) { @Override protected String toString(int dimension, byte[] value) { - return Integer.toString(IntPoint.decodeDimension(value, 0)); + return Integer.toString(decodeDimension(value, 0)); } }; } @@ -225,30 +212,29 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, int... valuesIn) throws IOException { + public static Query newSetQuery(String field, int... 
values) { // Don't unexpectedly change the user's incoming values array: - int[] values = valuesIn.clone(); - - Arrays.sort(values); + int[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[Integer.BYTES]); + final BytesRef encoded = new BytesRef(new byte[Integer.BYTES]); return new PointInSetQuery(field, 1, Integer.BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - encodeDimension(values[upto], value.bytes, 0); + encodeDimension(sortedValues[upto], encoded.bytes, 0); upto++; - return value; + return encoded; } } }) { diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java index 5b3708942857..a1d05d1deff9 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java @@ -16,14 +16,12 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.util.Arrays; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; /** @@ -35,10 +33,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, long)} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, long...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, long, long)} for matching a 1D range. + *
  • {@link #newRangeQuery(String, long[], long[])} for matching points/ranges in n-dimensional space. *
*/ public final class LongPoint extends Field { @@ -126,13 +124,11 @@ public String toString() { } /** Encode n-dimensional long values into binary encoding */ - private static byte[][] encode(Long value[]) { + private static byte[][] encode(long value[]) { byte[][] encoded = new byte[value.length][]; for (int i = 0; i < value.length; i++) { - if (value[i] != null) { - encoded[i] = new byte[Long.BYTES]; - encodeDimension(value[i], encoded[i], 0); - } + encoded[i] = new byte[Long.BYTES]; + encodeDimension(value[i], encoded[i], 0); } return encoded; } @@ -155,7 +151,7 @@ public static long decodeDimension(byte value[], int offset) { * Create a query for matching an exact long value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, long[], long[])} instead. * * @param field field name. must not be {@code null}. * @param value exact value @@ -163,60 +159,51 @@ public static long decodeDimension(byte value[], int offset) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, long value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for long values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, long[], long[])} instead. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. + * by setting {@code lowerValue = Long.MIN_VALUE} or {@code upperValue = Long.MAX_VALUE}. *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue + 1} or {@code upperValue - 1} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. + * @param lowerValue lower portion of the range (inclusive). + * @param upperValue upper portion of the range (inclusive). * @throws IllegalArgumentException if {@code field} is null. * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, Long lowerValue, boolean lowerInclusive, Long upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, - new Long[] { lowerValue }, - new boolean[] { lowerInclusive }, - new Long[] { upperValue }, - new boolean[] { upperInclusive }); + public static Query newRangeQuery(String field, long lowerValue, long upperValue) { + return newRangeQuery(field, new long[] { lowerValue }, new long[] { upperValue }); } /** - * Create a multidimensional range query for long values. + * Create a range query for n-dimensional long values. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. + * by setting {@code lowerValue[i] = Long.MIN_VALUE} or {@code upperValue[i] = Long.MAX_VALUE}. *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue[i] + 1} or {@code upperValue[i] - 1} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. 
*/ - public static Query newMultiRangeQuery(String field, Long[] lowerValue, boolean lowerInclusive[], Long[] upperValue, boolean upperInclusive[]) { + public static Query newRangeQuery(String field, long[] lowerValue, long[] upperValue) { PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, LongPoint.encode(lowerValue), lowerInclusive, LongPoint.encode(upperValue), upperInclusive) { + return new PointRangeQuery(field, encode(lowerValue), encode(upperValue)) { @Override protected String toString(int dimension, byte[] value) { - return Long.toString(LongPoint.decodeDimension(value, 0)); + return Long.toString(decodeDimension(value, 0)); } }; } @@ -225,30 +212,29 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, long... valuesIn) throws IOException { + public static Query newSetQuery(String field, long... 
values) { // Don't unexpectedly change the user's incoming values array: - long[] values = valuesIn.clone(); - - Arrays.sort(values); + long[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[Long.BYTES]); + final BytesRef encoded = new BytesRef(new byte[Long.BYTES]); return new PointInSetQuery(field, 1, Long.BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - encodeDimension(values[upto], value.bytes, 0); + encodeDimension(sortedValues[upto], encoded.bytes, 0); upto++; - return value; + return encoded; } } }) { diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java index 3d6086c90858..f5ba12dc0db6 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java @@ -64,9 +64,18 @@ public abstract class PointInSetQuery extends Query { final String field; final int numDims; final int bytesPerDim; + + /** + * Iterator of encoded point values. + */ + // TODO: if we want to stream, maybe we should use jdk stream class? + public static abstract class Stream implements BytesRefIterator { + @Override + public abstract BytesRef next(); + }; /** The {@code packedPoints} iterator must be in sorted order. 
*/ - protected PointInSetQuery(String field, int numDims, int bytesPerDim, BytesRefIterator packedPoints) throws IOException { + protected PointInSetQuery(String field, int numDims, int bytesPerDim, Stream packedPoints) { this.field = field; if (bytesPerDim < 1 || bytesPerDim > PointValues.MAX_NUM_BYTES) { throw new IllegalArgumentException("bytesPerDim must be > 0 and <= " + PointValues.MAX_NUM_BYTES + "; got " + bytesPerDim); diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index 189ba433f317..85c486e7ddea 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.search; - import java.io.IOException; import java.util.Arrays; import java.util.Objects; @@ -33,7 +32,6 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.DocIdSetBuilder; -import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.StringHelper; /** @@ -57,72 +55,49 @@ public abstract class PointRangeQuery extends Query { final String field; final int numDims; + final int bytesPerDim; final byte[][] lowerPoint; - final boolean[] lowerInclusive; final byte[][] upperPoint; - final boolean[] upperInclusive; - // This is null only in the "fully open range" case - final Integer bytesPerDim; /** * Expert: create a multidimensional range query for point values. - *

- * You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. - *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. * * @param field field name. must not be {@code null}. - * @param lowerPoint lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperPoint upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. + * @param lowerPoint lower portion of the range (inclusive). + * @param upperPoint upper portion of the range (inclusive). * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} */ - protected PointRangeQuery(String field, - byte[][] lowerPoint, boolean[] lowerInclusive, - byte[][] upperPoint, boolean[] upperInclusive) { + protected PointRangeQuery(String field, byte[][] lowerPoint, byte[][] upperPoint) { checkArgs(field, lowerPoint, upperPoint); this.field = field; - numDims = lowerPoint.length; + if (lowerPoint.length == 0) { + throw new IllegalArgumentException("lowerPoint has length of zero"); + } + this.numDims = lowerPoint.length; + if (upperPoint.length != numDims) { throw new IllegalArgumentException("lowerPoint has length=" + numDims + " but upperPoint has different length=" + upperPoint.length); } - if (lowerInclusive.length != numDims) { - throw new IllegalArgumentException("lowerInclusive has length=" + lowerInclusive.length + " but expected=" + numDims); - } - if (upperInclusive.length != numDims) { - throw new IllegalArgumentException("upperInclusive has length=" + upperInclusive.length + " but expected=" + numDims); - } this.lowerPoint = lowerPoint; - this.lowerInclusive = lowerInclusive; 
this.upperPoint = upperPoint; - this.upperInclusive = upperInclusive; - - int bytesPerDim = -1; - for(byte[] value : lowerPoint) { - if (value != null) { - if (bytesPerDim == -1) { - bytesPerDim = value.length; - } else if (value.length != bytesPerDim) { - throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + value.length); - } - } + + if (lowerPoint[0] == null) { + throw new IllegalArgumentException("lowerPoint[0] is null"); } - for(byte[] value : upperPoint) { - if (value != null) { - if (bytesPerDim == -1) { - bytesPerDim = value.length; - } else if (value.length != bytesPerDim) { - throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + value.length); - } + this.bytesPerDim = lowerPoint[0].length; + for (int i = 0; i < numDims; i++) { + if (lowerPoint[i] == null) { + throw new IllegalArgumentException("lowerPoint[" + i + "] is null"); + } + if (upperPoint[i] == null) { + throw new IllegalArgumentException("upperPoint[" + i + "] is null"); + } + if (lowerPoint[i].length != bytesPerDim) { + throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + lowerPoint[i].length); + } + if (upperPoint[i].length != bytesPerDim) { + throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + upperPoint[i].length); } - } - if (bytesPerDim == -1) { - this.bytesPerDim = null; - } else { - this.bytesPerDim = bytesPerDim; } } @@ -166,55 +141,18 @@ public Scorer scorer(LeafReaderContext context) throws IOException { if (fieldInfo.getPointDimensionCount() != numDims) { throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + fieldInfo.getPointDimensionCount() + " but this query has numDims=" + numDims); } - if (bytesPerDim != null && bytesPerDim.intValue() != fieldInfo.getPointNumBytes()) { + if (bytesPerDim != 
fieldInfo.getPointNumBytes()) { throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getPointNumBytes() + " but this query has bytesPerDim=" + bytesPerDim); } int bytesPerDim = fieldInfo.getPointNumBytes(); - byte[] packedLowerIncl = new byte[numDims * bytesPerDim]; - byte[] packedUpperIncl = new byte[numDims * bytesPerDim]; - - byte[] minValue = new byte[bytesPerDim]; - byte[] maxValue = new byte[bytesPerDim]; - Arrays.fill(maxValue, (byte) 0xff); - - byte[] one = new byte[bytesPerDim]; - one[bytesPerDim-1] = 1; + byte[] packedLower = new byte[numDims * bytesPerDim]; + byte[] packedUpper = new byte[numDims * bytesPerDim]; - // Carefully pack lower and upper bounds, taking care of per-dim inclusive: + // Carefully pack lower and upper bounds for(int dim=0;dim 0) { + if (StringHelper.compare(bytesPerDim, packedValue, offset, packedUpper, offset) > 0) { // Doc's value is too high, in this dimension return; } @@ -260,13 +198,13 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { for(int dim=0;dim 0 || - StringHelper.compare(bytesPerDim, maxPackedValue, offset, packedLowerIncl, offset) < 0) { + if (StringHelper.compare(bytesPerDim, minPackedValue, offset, packedUpper, offset) > 0 || + StringHelper.compare(bytesPerDim, maxPackedValue, offset, packedLower, offset) < 0) { return Relation.CELL_OUTSIDE_QUERY; } - crosses |= StringHelper.compare(bytesPerDim, minPackedValue, offset, packedLowerIncl, offset) < 0 || - StringHelper.compare(bytesPerDim, maxPackedValue, offset, packedUpperIncl, offset) > 0; + crosses |= StringHelper.compare(bytesPerDim, minPackedValue, offset, packedLower, offset) < 0 || + StringHelper.compare(bytesPerDim, maxPackedValue, offset, packedUpper, offset) > 0; } if (crosses) { @@ -287,8 +225,6 @@ public int hashCode() { int hash = super.hashCode(); hash = 31 * hash + Arrays.hashCode(lowerPoint); hash = 31 * hash + Arrays.hashCode(upperPoint); - hash = 31 * hash + 
Arrays.hashCode(lowerInclusive); - hash = 31 * hash + Arrays.hashCode(upperInclusive); hash = 31 * hash + numDims; hash = 31 * hash + Objects.hashCode(bytesPerDim); return hash; @@ -301,9 +237,7 @@ public boolean equals(Object other) { return q.numDims == numDims && q.bytesPerDim == bytesPerDim && Arrays.equals(lowerPoint, q.lowerPoint) && - Arrays.equals(lowerInclusive, q.lowerInclusive) && - Arrays.equals(upperPoint, q.upperPoint) && - Arrays.equals(upperInclusive, q.upperInclusive); + Arrays.equals(upperPoint, q.upperPoint); } return false; @@ -323,31 +257,11 @@ public String toString(String field) { sb.append(','); } - if (lowerInclusive[i]) { - sb.append('['); - } else { - sb.append('{'); - } - - if (lowerPoint[i] == null) { - sb.append('*'); - } else { - sb.append(toString(i, lowerPoint[i])); - } - + sb.append('['); + sb.append(toString(i, lowerPoint[i])); sb.append(" TO "); - - if (upperPoint[i] == null) { - sb.append('*'); - } else { - sb.append(toString(i, upperPoint[i])); - } - - if (upperInclusive[i]) { - sb.append(']'); - } else { - sb.append('}'); - } + sb.append(toString(i, upperPoint[i])); + sb.append(']'); } return sb.toString(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index 0f265512632a..0034cee0c453 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -1351,7 +1351,7 @@ private static void testPointRangeQuery(IndexSearcher s) throws IOException { max = x; } - TopDocs hits = s.search(LongPoint.newRangeQuery("number", min, true, max, true), 100); + TopDocs hits = s.search(LongPoint.newRangeQuery("number", min, max), 100); for(ScoreDoc scoreDoc : hits.scoreDocs) { long value = Long.parseLong(s.doc(scoreDoc.doc).get("text").split(" ")[1]); assertTrue(value >= min); diff --git 
a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java index fb3179298fb2..19096c760035 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java @@ -63,7 +63,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.NumericUtils; @@ -118,8 +117,8 @@ public void testBasicInts() throws Exception { DirectoryReader r = DirectoryReader.open(w); IndexSearcher s = new IndexSearcher(r); - assertEquals(2, s.count(IntPoint.newRangeQuery("point", -8, false, 1, false))); - assertEquals(3, s.count(IntPoint.newRangeQuery("point", -7, true, 3, true))); + assertEquals(2, s.count(IntPoint.newRangeQuery("point", -8, 1))); + assertEquals(3, s.count(IntPoint.newRangeQuery("point", -7, 3))); assertEquals(1, s.count(IntPoint.newExactQuery("point", -7))); assertEquals(0, s.count(IntPoint.newExactQuery("point", -6))); w.close(); @@ -145,8 +144,8 @@ public void testBasicFloats() throws Exception { DirectoryReader r = DirectoryReader.open(w); IndexSearcher s = new IndexSearcher(r); - assertEquals(2, s.count(FloatPoint.newRangeQuery("point", -8.0f, false, 1.0f, false))); - assertEquals(3, s.count(FloatPoint.newRangeQuery("point", -7.0f, true, 3.0f, true))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", -8.0f, 1.0f))); + assertEquals(3, s.count(FloatPoint.newRangeQuery("point", -7.0f, 3.0f))); assertEquals(1, s.count(FloatPoint.newExactQuery("point", -7.0f))); assertEquals(0, s.count(FloatPoint.newExactQuery("point", -6.0f))); w.close(); @@ -172,8 +171,8 @@ public void testBasicLongs() throws Exception { DirectoryReader r = DirectoryReader.open(w); IndexSearcher s = new 
IndexSearcher(r); - assertEquals(2, s.count(LongPoint.newRangeQuery("point", -8L, false, 1L, false))); - assertEquals(3, s.count(LongPoint.newRangeQuery("point", -7L, true, 3L, true))); + assertEquals(2, s.count(LongPoint.newRangeQuery("point", -8L, 1L))); + assertEquals(3, s.count(LongPoint.newRangeQuery("point", -7L, 3L))); assertEquals(1, s.count(LongPoint.newExactQuery("point", -7L))); assertEquals(0, s.count(LongPoint.newExactQuery("point", -6L))); w.close(); @@ -199,18 +198,140 @@ public void testBasicDoubles() throws Exception { DirectoryReader r = DirectoryReader.open(w); IndexSearcher s = new IndexSearcher(r); - assertEquals(2, s.count(DoublePoint.newRangeQuery("point", -8.0, false, 1.0, false))); - assertEquals(3, s.count(DoublePoint.newRangeQuery("point", -7.0, true, 3.0, true))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", -8.0, 1.0))); + assertEquals(3, s.count(DoublePoint.newRangeQuery("point", -7.0, 3.0))); assertEquals(1, s.count(DoublePoint.newExactQuery("point", -7.0))); assertEquals(0, s.count(DoublePoint.newExactQuery("point", -6.0))); w.close(); r.close(); dir.close(); } + + public void testCrazyDoubles() throws Exception { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))); + + Document doc = new Document(); + doc.add(new DoublePoint("point", Double.NEGATIVE_INFINITY)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", -0.0D)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", +0.0D)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", Double.MIN_VALUE)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", Double.MAX_VALUE)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", Double.POSITIVE_INFINITY)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new DoublePoint("point", 
Double.NaN)); + w.addDocument(doc); + + DirectoryReader r = DirectoryReader.open(w); + IndexSearcher s = new IndexSearcher(r); + + // exact queries + assertEquals(1, s.count(DoublePoint.newExactQuery("point", Double.NEGATIVE_INFINITY))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", -0.0D))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", +0.0D))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", Double.MIN_VALUE))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", Double.MAX_VALUE))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", Double.POSITIVE_INFINITY))); + assertEquals(1, s.count(DoublePoint.newExactQuery("point", Double.NaN))); + + // set query + double set[] = new double[] { Double.MAX_VALUE, Double.NaN, +0.0D, Double.NEGATIVE_INFINITY, Double.MIN_VALUE, -0.0D, Double.POSITIVE_INFINITY }; + assertEquals(7, s.count(DoublePoint.newSetQuery("point", set))); + + // ranges + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", Double.NEGATIVE_INFINITY, -0.0D))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", -0.0D, 0.0D))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", 0.0D, Double.MIN_VALUE))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", Double.MIN_VALUE, Double.MAX_VALUE))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", Double.MAX_VALUE, Double.POSITIVE_INFINITY))); + assertEquals(2, s.count(DoublePoint.newRangeQuery("point", Double.POSITIVE_INFINITY, Double.NaN))); + + w.close(); + r.close(); + dir.close(); + } + + public void testCrazyFloats() throws Exception { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))); + + Document doc = new Document(); + doc.add(new FloatPoint("point", Float.NEGATIVE_INFINITY)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new FloatPoint("point", -0.0F)); + w.addDocument(doc); + + doc = new Document(); + 
doc.add(new FloatPoint("point", +0.0F)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new FloatPoint("point", Float.MIN_VALUE)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new FloatPoint("point", Float.MAX_VALUE)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new FloatPoint("point", Float.POSITIVE_INFINITY)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new FloatPoint("point", Float.NaN)); + w.addDocument(doc); + + DirectoryReader r = DirectoryReader.open(w); + IndexSearcher s = new IndexSearcher(r); + + // exact queries + assertEquals(1, s.count(FloatPoint.newExactQuery("point", Float.NEGATIVE_INFINITY))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", -0.0F))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", +0.0F))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", Float.MIN_VALUE))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", Float.MAX_VALUE))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", Float.POSITIVE_INFINITY))); + assertEquals(1, s.count(FloatPoint.newExactQuery("point", Float.NaN))); + + // set query + float set[] = new float[] { Float.MAX_VALUE, Float.NaN, +0.0F, Float.NEGATIVE_INFINITY, Float.MIN_VALUE, -0.0F, Float.POSITIVE_INFINITY }; + assertEquals(7, s.count(FloatPoint.newSetQuery("point", set))); + + // ranges + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", Float.NEGATIVE_INFINITY, -0.0F))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", -0.0F, 0.0F))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", 0.0F, Float.MIN_VALUE))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", Float.MIN_VALUE, Float.MAX_VALUE))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", Float.MAX_VALUE, Float.POSITIVE_INFINITY))); + assertEquals(2, s.count(FloatPoint.newRangeQuery("point", Float.POSITIVE_INFINITY, Float.NaN))); + + w.close(); + r.close(); + dir.close(); + } public void 
testAllEqual() throws Exception { int numValues = atLeast(10000); - long value = randomValue(false); + long value = randomValue(); long[] values = new long[numValues]; if (VERBOSE) { @@ -256,7 +377,7 @@ private void doTestRandomLongs(int count) throws Exception { // Identical to old value values[ord] = values[random().nextInt(ord)]; } else { - values[ord] = randomValue(false); + values[ord] = randomValue(); } ids[ord] = id; @@ -393,21 +514,19 @@ private void _run() throws Exception { NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id"); for (int iter=0;iter= lower && values[id] <= upper; if (hits.get(docID) != expected) { // We do exact quantized comparison so the bbox query should never disagree: fail(Thread.currentThread().getName() + ": iter=" + iter + " id=" + id + " docID=" + docID + " value=" + values[id] + " (range: " + lower + " TO " + upper + ") expected " + expected + " but got: " + hits.get(docID) + " deleted?=" + deleted.get(id) + " query=" + query); @@ -662,32 +771,20 @@ private void _run() throws Exception { for (int iter=0;iter 0) { + if (StringHelper.compare(bytesPerDim, lower[dim], 0, upper[dim], 0) > 0) { byte[] x = lower[dim]; lower[dim] = upper[dim]; upper[dim] = x; } - - includeLower[dim] = random().nextBoolean(); - includeUpper[dim] = random().nextBoolean(); } if (VERBOSE) { @@ -695,13 +792,12 @@ private void _run() throws Exception { for(int dim=0;dim= lower) && (upper == null || value <= upper); - } - static String bytesToString(byte[] bytes) { if (bytes == null) { return "null"; @@ -792,28 +871,16 @@ static String bytesToString(byte[] bytes) { return new BytesRef(bytes).toString(); } - private static boolean matches(int bytesPerDim, byte[][] lower, boolean[] includeLower, byte[][] upper, boolean[] includeUpper, byte[][] value) { + private static boolean matches(int bytesPerDim, byte[][] lower, byte[][] upper, byte[][] value) { int numDims = lower.length; for(int dim=0;dim 0 || (cmp == 0 && includeUpper[dim] == false)) { + 
if (StringHelper.compare(bytesPerDim, value[dim], 0, upper[dim], 0) > 0) { // Value is above the upper bound, on this dim return false; } @@ -822,13 +889,9 @@ private static boolean matches(int bytesPerDim, byte[][] lower, boolean[] includ return true; } - private static Long randomValue(boolean allowNull) { + private static long randomValue() { if (valueRange == 0) { - if (allowNull && random().nextInt(10) == 1) { - return null; - } else { - return random().nextLong(); - } + return random().nextLong(); } else { return valueMid + TestUtil.nextInt(random(), -valueRange, valueRange); } @@ -851,9 +914,9 @@ public void testMinMaxLong() throws Exception { IndexSearcher s = newSearcher(r, false); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, 0L, true))); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", 0L, true, Long.MAX_VALUE, true))); - assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, 0L))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", 0L, Long.MAX_VALUE))); + assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MAX_VALUE))); IOUtils.close(r, w, dir); } @@ -889,51 +952,15 @@ public void testBasicSortedSet() throws Exception { IndexSearcher s = newSearcher(r, false); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("aaa"), - true, - toUTF8("bbb"), - true))); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("c", 3), - true, - toUTF8("e", 3), - true))); - assertEquals(2, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("a", 3), - true, - toUTF8("z", 3), - true))); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - null, - true, - toUTF8("abc"), - true))); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("a", 3), - true, - toUTF8("abc"), - true))); - assertEquals(0, 
s.count(BinaryPoint.newRangeQuery("value", - toUTF8("a", 3), - true, - toUTF8("abc"), - false))); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("def"), - true, - null, - false))); - assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", - toUTF8(("def")), - true, - toUTF8("z", 3), - true))); - assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", - toUTF8("def"), - false, - toUTF8("z", 3), - true))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("aaa"), toUTF8("bbb")))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("c", 3), toUTF8("e", 3)))); + assertEquals(2, s.count(BinaryPoint.newRangeQuery("value", toUTF8("a", 3), toUTF8("z", 3)))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("", 3), toUTF8("abc")))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("a", 3), toUTF8("abc")))); + assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", toUTF8("a", 3), toUTF8("abb")))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8("def"), toUTF8("zzz")))); + assertEquals(1, s.count(BinaryPoint.newRangeQuery("value", toUTF8(("def")), toUTF8("z", 3)))); + assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", toUTF8("deg"), toUTF8("z", 3)))); IOUtils.close(r, w, dir); } @@ -954,12 +981,10 @@ public void testLongMinMaxNumeric() throws Exception { IndexSearcher s = newSearcher(r, false); - assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true))); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false))); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true))); - assertEquals(0, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false))); - - assertEquals(2, s.count(BinaryPoint.newRangeQuery("value", (byte[]) null, true, null, true))); + assertEquals(2, 
s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MAX_VALUE))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MAX_VALUE-1))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE+1, Long.MAX_VALUE))); + assertEquals(0, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE+1, Long.MAX_VALUE-1))); IOUtils.close(r, w, dir); } @@ -980,12 +1005,10 @@ public void testLongMinMaxSortedSet() throws Exception { IndexSearcher s = newSearcher(r, false); - assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true))); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false))); - assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true))); - assertEquals(0, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false))); - - assertEquals(2, s.count(LongPoint.newRangeQuery("value", (Long) null, true, null, true))); + assertEquals(2, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MAX_VALUE))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MAX_VALUE-1))); + assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE+1, Long.MAX_VALUE))); + assertEquals(0, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE+1, Long.MAX_VALUE-1))); IOUtils.close(r, w, dir); } @@ -1004,10 +1027,8 @@ public void testSortedSetNoOrdsMatch() throws Exception { IndexReader r = w.getReader(); - IndexSearcher s = newSearcher(r, false); - assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", toUTF8("m"), true, toUTF8("n"), false))); - - assertEquals(2, s.count(BinaryPoint.newRangeQuery("value", (byte[]) null, true, null, true))); + IndexSearcher s = newSearcher(r,false); + assertEquals(0, s.count(BinaryPoint.newRangeQuery("value", toUTF8("m"), toUTF8("m")))); IOUtils.close(r, w, dir); } @@ -1027,7 +1048,7 @@ public 
void testNumericNoValuesMatch() throws Exception { IndexReader r = w.getReader(); IndexSearcher s = new IndexSearcher(r); - assertEquals(0, s.count(LongPoint.newRangeQuery("value", 17L, true, 13L, false))); + assertEquals(0, s.count(LongPoint.newRangeQuery("value", 17L, 13L))); IOUtils.close(r, w, dir); } @@ -1042,7 +1063,7 @@ public void testNoDocs() throws Exception { IndexReader r = w.getReader(); IndexSearcher s = newSearcher(r, false); - assertEquals(0, s.count(LongPoint.newRangeQuery("value", 17L, true, 13L, false))); + assertEquals(0, s.count(LongPoint.newRangeQuery("value", 17L, 13L))); IOUtils.close(r, w, dir); } @@ -1061,41 +1082,16 @@ public void testWrongNumDims() throws Exception { // no wrapping, else the exc might happen in executor thread: IndexSearcher s = new IndexSearcher(r); byte[][] point = new byte[2][]; + point[0] = new byte[8]; + point[1] = new byte[8]; IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - s.count(BinaryPoint.newMultiRangeQuery("value", point, new boolean[] {true, true}, point, new boolean[] {true, true})); + s.count(BinaryPoint.newRangeQuery("value", point, point)); }); assertEquals("field=\"value\" was indexed with numDims=1 but this query has numDims=2", expected.getMessage()); IOUtils.close(r, w, dir); } - /** ensure good exception when boolean[]s for inclusive have wrong length */ - public void testWrongNumBooleans() throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(); - iwc.setCodec(getCodec()); - RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); - Document doc = new Document(); - doc.add(new LongPoint("value", 1L, 2L)); - w.addDocument(doc); - - IndexReader r = w.getReader(); - - // no wrapping, else the exc might happen in executor thread: - IndexSearcher s = new IndexSearcher(r); - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - s.count(LongPoint.newMultiRangeQuery("value", 
new Long[] { 1L, 2L }, new boolean[] {true}, new Long[] { 1L, 2L }, new boolean[] {true, true})); - }); - assertEquals("lowerInclusive has length=1 but expected=2", expected.getMessage()); - - expected = expectThrows(IllegalArgumentException.class, () -> { - s.count(LongPoint.newMultiRangeQuery("value", new Long[] { 1L, 2L }, new boolean[] {true, true}, new Long[] { 1L, 2L }, new boolean[] {true})); - }); - assertEquals("upperInclusive has length=1 but expected=2", expected.getMessage()); - - IOUtils.close(r, w, dir); - } - public void testWrongNumBytes() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(); @@ -1112,7 +1108,7 @@ public void testWrongNumBytes() throws Exception { byte[][] point = new byte[1][]; point[0] = new byte[10]; IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - s.count(BinaryPoint.newMultiRangeQuery("value", point, new boolean[] {true}, point, new boolean[] {true})); + s.count(BinaryPoint.newRangeQuery("value", point, point)); }); assertEquals("field=\"value\" was indexed with bytesPerDim=8 but this query has bytesPerDim=10", expected.getMessage()); @@ -1228,31 +1224,25 @@ public void testExactPoints() throws Exception { public void testToString() throws Exception { // ints - assertEquals("field:[1 TO 2}", IntPoint.newRangeQuery("field", 1, true, 2, false).toString()); - assertEquals("field:{-2 TO 1]", IntPoint.newRangeQuery("field", -2, false, 1, true).toString()); - assertEquals("field:[* TO 2}", IntPoint.newRangeQuery("field", null, true, 2, false).toString()); + assertEquals("field:[1 TO 2]", IntPoint.newRangeQuery("field", 1, 2).toString()); + assertEquals("field:[-2 TO 1]", IntPoint.newRangeQuery("field", -2, 1).toString()); // longs - assertEquals("field:[1099511627776 TO 2199023255552}", LongPoint.newRangeQuery("field", 1L<<40, true, 1L<<41, false).toString()); - assertEquals("field:{-5 TO 6]", LongPoint.newRangeQuery("field", -5L, false, 6L, 
true).toString()); - assertEquals("field:[* TO 2}", LongPoint.newRangeQuery("field", null, true, 2L, false).toString()); + assertEquals("field:[1099511627776 TO 2199023255552]", LongPoint.newRangeQuery("field", 1L<<40, 1L<<41).toString()); + assertEquals("field:[-5 TO 6]", LongPoint.newRangeQuery("field", -5L, 6L).toString()); // floats - assertEquals("field:[1.3 TO 2.5}", FloatPoint.newRangeQuery("field", 1.3F, true, 2.5F, false).toString()); - assertEquals("field:{-2.9 TO 1.0]", FloatPoint.newRangeQuery("field", -2.9F, false, 1.0F, true).toString()); - assertEquals("field:{-2.9 TO *]", FloatPoint.newRangeQuery("field", -2.9F, false, null, true).toString()); + assertEquals("field:[1.3 TO 2.5]", FloatPoint.newRangeQuery("field", 1.3F, 2.5F).toString()); + assertEquals("field:[-2.9 TO 1.0]", FloatPoint.newRangeQuery("field", -2.9F, 1.0F).toString()); // doubles - assertEquals("field:[1.3 TO 2.5}", DoublePoint.newRangeQuery("field", 1.3, true, 2.5, false).toString()); - assertEquals("field:{-2.9 TO 1.0]", DoublePoint.newRangeQuery("field", -2.9, false, 1.0, true).toString()); - assertEquals("field:{-2.9 TO *]", DoublePoint.newRangeQuery("field", -2.9, false, null, true).toString()); + assertEquals("field:[1.3 TO 2.5]", DoublePoint.newRangeQuery("field", 1.3, 2.5).toString()); + assertEquals("field:[-2.9 TO 1.0]", DoublePoint.newRangeQuery("field", -2.9, 1.0).toString()); // n-dimensional double - assertEquals("field:[1.3 TO 2.5},{-2.9 TO 1.0]", DoublePoint.newMultiRangeQuery("field", - new Double[] { 1.3, -2.9 }, - new boolean[] { true, false }, - new Double[] { 2.5, 1.0 }, - new boolean[] { false, true }).toString()); + assertEquals("field:[1.3 TO 2.5],[-2.9 TO 1.0]", DoublePoint.newRangeQuery("field", + new double[] { 1.3, -2.9 }, + new double[] { 2.5, 1.0 }).toString()); } @@ -1430,7 +1420,7 @@ public int compare(byte[] a, byte[] b) { return new PointInSetQuery(field, numDims, Integer.BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; 
@Override public BytesRef next() { @@ -1818,7 +1808,7 @@ public void testInvalidPointInSetQuery() throws Exception { IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { new PointInSetQuery("foo", 3, 4, - new BytesRefIterator() { + new PointInSetQuery.Stream() { @Override public BytesRef next() { return new BytesRef(new byte[3]); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java index 2edebb3d7248..9c7ada8c68f7 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java @@ -26,7 +26,7 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase { public void testCostlyFilter() { assertTrue(UsageTrackingQueryCachingPolicy.isCostly(new PrefixQuery(new Term("field", "prefix")))); - assertTrue(UsageTrackingQueryCachingPolicy.isCostly(IntPoint.newRangeQuery("intField", 1, true, 1000, true))); + assertTrue(UsageTrackingQueryCachingPolicy.isCostly(IntPoint.newRangeQuery("intField", 1, 1000))); assertFalse(UsageTrackingQueryCachingPolicy.isCostly(new TermQuery(new Term("field", "value")))); } diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java index d9e944179cee..fff3bff9f6d1 100644 --- a/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java +++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java @@ -179,7 +179,7 @@ public static Query getBoundingBoxQuery(double originLat, double originLng, doub BooleanQuery.Builder f = new BooleanQuery.Builder(); // Add latitude range filter: - f.add(DoublePoint.newRangeQuery("latitude", Math.toDegrees(minLat), true, Math.toDegrees(maxLat), true), + 
f.add(DoublePoint.newRangeQuery("latitude", Math.toDegrees(minLat), Math.toDegrees(maxLat)), BooleanClause.Occur.FILTER); // Add longitude range filter: @@ -187,13 +187,13 @@ public static Query getBoundingBoxQuery(double originLat, double originLng, doub // The bounding box crosses the international date // line: BooleanQuery.Builder lonF = new BooleanQuery.Builder(); - lonF.add(DoublePoint.newRangeQuery("longitude", Math.toDegrees(minLng), true, null, true), + lonF.add(DoublePoint.newRangeQuery("longitude", Math.toDegrees(minLng), Double.POSITIVE_INFINITY), BooleanClause.Occur.SHOULD); - lonF.add(DoublePoint.newRangeQuery("longitude", null, true, Math.toDegrees(maxLng), true), + lonF.add(DoublePoint.newRangeQuery("longitude", Double.NEGATIVE_INFINITY, Math.toDegrees(maxLng)), BooleanClause.Occur.SHOULD); f.add(lonF.build(), BooleanClause.Occur.MUST); } else { - f.add(DoublePoint.newRangeQuery("longitude", Math.toDegrees(minLng), true, Math.toDegrees(maxLng), true), + f.add(DoublePoint.newRangeQuery("longitude", Math.toDegrees(minLng), Math.toDegrees(maxLng)), BooleanClause.Occur.FILTER); } diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java index 96d7c17cf9f0..8fc794909e30 100644 --- a/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java +++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java @@ -105,8 +105,7 @@ public TopDocs drillDown(LongRange range) throws IOException { // documents ("browse only"): DrillDownQuery q = new DrillDownQuery(getConfig()); - q.add("timestamp", LongPoint.newRangeQuery("timestamp", range.min, range.minInclusive, range.max, range.maxInclusive)); - + q.add("timestamp", LongPoint.newRangeQuery("timestamp", range.min, range.max)); return searcher.search(q, 10); } diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java 
b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java index 362dd7b7e97e..c246d7489dd4 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java @@ -38,42 +38,29 @@ * * @lucene.experimental */ public final class DoubleRange extends Range { - final double minIncl; - final double maxIncl; - - /** Minimum. */ + /** Minimum (inclusive). */ public final double min; - /** Maximum. */ + /** Maximum (inclusive. */ public final double max; - /** True if the minimum value is inclusive. */ - public final boolean minInclusive; - - /** True if the maximum value is inclusive. */ - public final boolean maxInclusive; - /** Create a DoubleRange. */ public DoubleRange(String label, double minIn, boolean minInclusive, double maxIn, boolean maxInclusive) { super(label); - this.min = minIn; - this.max = maxIn; - this.minInclusive = minInclusive; - this.maxInclusive = maxInclusive; // TODO: if DoubleDocValuesField used // LegacyNumericUtils.doubleToSortableLong format (instead of // Double.doubleToRawLongBits) we could do comparisons // in long space - if (Double.isNaN(min)) { + if (Double.isNaN(minIn)) { throw new IllegalArgumentException("min cannot be NaN"); } if (!minInclusive) { minIn = Math.nextUp(minIn); } - if (Double.isNaN(max)) { + if (Double.isNaN(maxIn)) { throw new IllegalArgumentException("max cannot be NaN"); } if (!maxInclusive) { @@ -85,24 +72,24 @@ public DoubleRange(String label, double minIn, boolean minInclusive, double maxI failNoMatch(); } - this.minIncl = minIn; - this.maxIncl = maxIn; + this.min = minIn; + this.max = maxIn; } /** True if this range accepts the provided value. 
*/ public boolean accept(double value) { - return value >= minIncl && value <= maxIncl; + return value >= min && value <= max; } LongRange toLongRange() { return new LongRange(label, - LegacyNumericUtils.doubleToSortableLong(minIncl), true, - LegacyNumericUtils.doubleToSortableLong(maxIncl), true); + LegacyNumericUtils.doubleToSortableLong(min), true, + LegacyNumericUtils.doubleToSortableLong(max), true); } @Override public String toString() { - return "DoubleRange(" + minIncl + " to " + maxIncl + ")"; + return "DoubleRange(" + min + " to " + max + ")"; } private static class ValueSourceQuery extends Query { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java index 8892725bb837..a39ea7e8de9c 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java @@ -88,8 +88,8 @@ private void count(ValueSource valueSource, List matchingDocs) thr for(int i=0;i= minIncl && value <= maxIncl; + return value >= min && value <= max; } @Override public String toString() { - return "LongRange(" + minIncl + " to " + maxIncl + ")"; + return "LongRange(" + min + " to " + max + ")"; } private static class ValueSourceQuery extends Query { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/LongRangeCounter.java b/lucene/facet/src/java/org/apache/lucene/facet/range/LongRangeCounter.java index 8c0b123007d1..5c625f088671 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/LongRangeCounter.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/LongRangeCounter.java @@ -49,17 +49,17 @@ public LongRangeCounter(LongRange[] ranges) { endsMap.put(Long.MAX_VALUE, 2); for(LongRange range : ranges) { - Integer cur = endsMap.get(range.minIncl); + Integer cur = endsMap.get(range.min); if (cur == null) { - endsMap.put(range.minIncl, 
1); + endsMap.put(range.min, 1); } else { - endsMap.put(range.minIncl, cur.intValue() | 1); + endsMap.put(range.min, cur.intValue() | 1); } - cur = endsMap.get(range.maxIncl); + cur = endsMap.get(range.max); if (cur == null) { - endsMap.put(range.maxIncl, 2); + endsMap.put(range.max, 2); } else { - endsMap.put(range.maxIncl, cur.intValue() | 2); + endsMap.put(range.max, cur.intValue() | 2); } } @@ -276,7 +276,7 @@ static void indent(StringBuilder sb, int depth) { /** Recursively assigns range outputs to each node. */ void addOutputs(int index, LongRange range) { - if (start >= range.minIncl && end <= range.maxIncl) { + if (start >= range.min && end <= range.max) { // Our range is fully included in the incoming // range; add to our output list: if (outputs == null) { diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java index c4233c04710b..e7e5d5720883 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java @@ -280,7 +280,7 @@ protected boolean scoreSubDocsAtOnce() { // Third search, drill down on "less than or equal to 10": ddq = new DrillDownQuery(config); - ddq.add("field", LongPoint.newRangeQuery("field", 0L, true, 10L, true)); + ddq.add("field", LongPoint.newRangeQuery("field", 0L, 10L)); dsr = ds.search(null, ddq, 10); assertEquals(11, dsr.hits.totalHits); @@ -460,9 +460,9 @@ public void testRandomLongs() throws Exception { Query fastMatchQuery; if (random().nextBoolean()) { if (random().nextBoolean()) { - fastMatchQuery = LongPoint.newRangeQuery("field", minValue, true, maxValue, true); + fastMatchQuery = LongPoint.newRangeQuery("field", minValue, maxValue); } else { - fastMatchQuery = LongPoint.newRangeQuery("field", minAcceptedValue, true, maxAcceptedValue, true); + fastMatchQuery = LongPoint.newRangeQuery("field", 
minAcceptedValue, maxAcceptedValue); } } else { fastMatchQuery = null; @@ -484,7 +484,7 @@ public void testRandomLongs() throws Exception { // Test drill-down: DrillDownQuery ddq = new DrillDownQuery(config); if (random().nextBoolean()) { - ddq.add("field", LongPoint.newRangeQuery("field", range.min, range.minInclusive, range.max, range.maxInclusive)); + ddq.add("field", LongPoint.newRangeQuery("field", range.min, range.max)); } else { ddq.add("field", range.getQuery(fastMatchQuery, vs)); } @@ -615,9 +615,9 @@ public void testRandomFloats() throws Exception { Query fastMatchQuery; if (random().nextBoolean()) { if (random().nextBoolean()) { - fastMatchQuery = FloatPoint.newRangeQuery("field", minValue, true, maxValue, true); + fastMatchQuery = FloatPoint.newRangeQuery("field", minValue, maxValue); } else { - fastMatchQuery = FloatPoint.newRangeQuery("field", minAcceptedValue, true, maxAcceptedValue, true); + fastMatchQuery = FloatPoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue); } } else { fastMatchQuery = null; @@ -639,7 +639,7 @@ public void testRandomFloats() throws Exception { // Test drill-down: DrillDownQuery ddq = new DrillDownQuery(config); if (random().nextBoolean()) { - ddq.add("field", FloatPoint.newRangeQuery("field", (float) range.min, range.minInclusive, (float) range.max, range.maxInclusive)); + ddq.add("field", FloatPoint.newRangeQuery("field", (float) range.min, (float) range.max)); } else { ddq.add("field", range.getQuery(fastMatchQuery, vs)); } @@ -754,9 +754,9 @@ public void testRandomDoubles() throws Exception { Query fastMatchFilter; if (random().nextBoolean()) { if (random().nextBoolean()) { - fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, true, maxValue, true); + fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, maxValue); } else { - fastMatchFilter = DoublePoint.newRangeQuery("field", minAcceptedValue, true, maxAcceptedValue, true); + fastMatchFilter = DoublePoint.newRangeQuery("field", 
minAcceptedValue, maxAcceptedValue); } } else { fastMatchFilter = null; @@ -778,7 +778,7 @@ public void testRandomDoubles() throws Exception { // Test drill-down: DrillDownQuery ddq = new DrillDownQuery(config); if (random().nextBoolean()) { - ddq.add("field", DoublePoint.newRangeQuery("field", range.min, range.minInclusive, range.max, range.maxInclusive)); + ddq.add("field", DoublePoint.newRangeQuery("field", range.min, range.max)); } else { ddq.add("field", range.getQuery(fastMatchFilter, vs)); } diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java index edc91f6e9d3e..187f4a546f52 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java @@ -583,7 +583,7 @@ public void testExternalReader() throws Exception { public void testDimensionalRangeQuery() throws Exception { // doesn't currently highlight, but make sure it doesn't cause exception either - query = IntPoint.newRangeQuery(NUMERIC_FIELD_NAME, 2, true, 6, true); + query = IntPoint.newRangeQuery(NUMERIC_FIELD_NAME, 2, 6); searcher = newSearcher(reader); hits = searcher.search(query, 100); int maxNumFragmentsRequired = 2; diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java index cb9f5c142d3a..9f765ab1865e 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.math.BigInteger; import java.util.Arrays; @@ -24,7 +23,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import 
org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.NumericUtils; /** @@ -36,16 +34,22 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact 1D point. - *
  • {@link #newRangeQuery newRangeQuery()} for matching a 1D range. - *
  • {@link #newMultiRangeQuery newMultiRangeQuery()} for matching points/ranges in n-dimensional space. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, BigInteger)} for matching an exact 1D point. + *
  • {@link #newSetQuery(String, BigInteger...)} for matching a set of 1D values. + *
  • {@link #newRangeQuery(String, BigInteger, BigInteger)} for matching a 1D range. + *
  • {@link #newRangeQuery(String, BigInteger[], BigInteger[])} for matching points/ranges in n-dimensional space. *
*/ public class BigIntegerPoint extends Field { /** The number of bytes per dimension: 128 bits. */ public static final int BYTES = 16; + + /** A constant holding the minimum value a BigIntegerPoint can have, -2127. */ + public static final BigInteger MIN_VALUE = BigInteger.ONE.shiftLeft(BYTES * 8 - 1).negate(); + + /** A constant holding the maximum value a BigIntegerPoint can have, 2127-1. */ + public static final BigInteger MAX_VALUE = BigInteger.ONE.shiftLeft(BYTES * 8 - 1).subtract(BigInteger.ONE); private static FieldType getType(int numDims) { FieldType type = new FieldType(); @@ -128,10 +132,8 @@ public String toString() { private static byte[][] encode(BigInteger value[]) { byte[][] encoded = new byte[value.length][]; for (int i = 0; i < value.length; i++) { - if (value[i] != null) { - encoded[i] = new byte[BYTES]; - encodeDimension(value[i], encoded[i], 0); - } + encoded[i] = new byte[BYTES]; + encodeDimension(value[i], encoded[i], 0); } return encoded; } @@ -154,65 +156,61 @@ public static BigInteger decodeDimension(byte value[], int offset) { * Create a query for matching an exact big integer value. *

* This is for simple one-dimension points, for multidimensional points use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, BigInteger[], BigInteger[])} instead. * * @param field field name. must not be {@code null}. - * @param value exact value - * @throws IllegalArgumentException if {@code field} is null. + * @param value exact value. must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null or {@code value} is null. * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, BigInteger value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** * Create a range query for big integer values. *

* This is for simple one-dimension ranges, for multidimensional ranges use - * {@link #newMultiRangeQuery newMultiRangeQuery()} instead. + * {@link #newRangeQuery(String, BigInteger[], BigInteger[])} instead. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. + * by setting {@code lowerValue = BigIntegerPoint.MIN_VALUE} + * or {@code upperValue = BigIntegerPoint.MAX_VALUE}. *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue.add(BigInteger.ONE)} + * or {@code upperValue.subtract(BigInteger.ONE)} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null. + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, {@code lowerValue} is null, or {@code upperValue} is null. * @return a query matching documents within this range. */ - public static Query newRangeQuery(String field, BigInteger lowerValue, boolean lowerInclusive, BigInteger upperValue, boolean upperInclusive) { - return newMultiRangeQuery(field, - new BigInteger[] { lowerValue }, - new boolean[] { lowerInclusive }, - new BigInteger[] { upperValue }, - new boolean[] { upperInclusive }); + public static Query newRangeQuery(String field, BigInteger lowerValue, BigInteger upperValue) { + PointRangeQuery.checkArgs(field, lowerValue, upperValue); + return newRangeQuery(field, new BigInteger[] { lowerValue }, new BigInteger[] { upperValue }); } /** - * Create a multidimensional range query for big integer values. + * Create a range query for n-dimensional big integer values. *

* You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}. + * by setting {@code lowerValue[i] = BigIntegerPoint.MIN_VALUE} + * or {@code upperValue[i] = BigIntegerPoint.MAX_VALUE}. *

- * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. + * Ranges are inclusive. For exclusive ranges, pass {@code lowerValue[i].add(BigInteger.ONE)} + * or {@code upperValue[i].subtract(BigInteger.ONE)} * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension. - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension. - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length} + * @param lowerValue lower portion of the range (inclusive). must not be {@code null}. + * @param upperValue upper portion of the range (inclusive). must not be {@code null}. + * @throws IllegalArgumentException if {@code field} is null, if {@code lowerValue} is null, if {@code upperValue} is null, + * or if {@code lowerValue.length != upperValue.length} * @return a query matching documents within this range. 
*/ - public static Query newMultiRangeQuery(String field, BigInteger[] lowerValue, boolean lowerInclusive[], BigInteger[] upperValue, boolean upperInclusive[]) { + public static Query newRangeQuery(String field, BigInteger[] lowerValue, BigInteger[] upperValue) { PointRangeQuery.checkArgs(field, lowerValue, upperValue); - return new PointRangeQuery(field, BigIntegerPoint.encode(lowerValue), lowerInclusive, BigIntegerPoint.encode(upperValue), upperInclusive) { + return new PointRangeQuery(field, BigIntegerPoint.encode(lowerValue), BigIntegerPoint.encode(upperValue)) { @Override protected String toString(int dimension, byte[] value) { return BigIntegerPoint.decodeDimension(value, 0).toString(); @@ -224,30 +222,29 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, BigInteger... valuesIn) throws IOException { + public static Query newSetQuery(String field, BigInteger... 
values) { // Don't unexpectedly change the user's incoming values array: - BigInteger[] values = valuesIn.clone(); - - Arrays.sort(values); + BigInteger[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[BYTES]); + final BytesRef encoded = new BytesRef(new byte[BYTES]); return new PointInSetQuery(field, 1, BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - encodeDimension(values[upto], value.bytes, 0); + encodeDimension(sortedValues[upto], encoded.bytes, 0); upto++; - return value; + return encoded; } } }) { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java index 1a73dc1b9f50..a0623b367927 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.document; -import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Arrays; @@ -25,7 +24,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; /** * An indexed 128-bit {@code InetAddress} field. @@ -36,10 +34,10 @@ *

* This field defines static factory methods for creating common queries: *

    - *
  • {@link #newExactQuery newExactQuery()} for matching an exact network address. - *
  • {@link #newPrefixQuery newPrefixQuery()} for matching a network based on CIDR prefix. - *
  • {@link #newRangeQuery newRangeQuery()} for matching arbitrary network address ranges. - *
  • {@link #newSetQuery newSetQuery()} for matching a set of 1D values. + *
  • {@link #newExactQuery(String, InetAddress)} for matching an exact network address. + *
  • {@link #newPrefixQuery(String, InetAddress, int)} for matching a network based on CIDR prefix. + *
  • {@link #newRangeQuery(String, InetAddress, InetAddress)} for matching arbitrary network address ranges. + *
  • {@link #newSetQuery(String, InetAddress...)} for matching a set of 1D values. *
*

* This field supports both IPv4 and IPv6 addresses: IPv4 addresses are converted @@ -149,7 +147,7 @@ public static InetAddress decode(byte value[]) { * @return a query matching documents with this exact value */ public static Query newExactQuery(String field, InetAddress value) { - return newRangeQuery(field, value, true, value, true); + return newRangeQuery(field, value, value); } /** @@ -162,6 +160,9 @@ public static Query newExactQuery(String field, InetAddress value) { * @return a query matching documents with addresses contained within this network */ public static Query newPrefixQuery(String field, InetAddress value, int prefixLength) { + if (value == null) { + throw new IllegalArgumentException("InetAddress cannot be null"); + } if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) { throw new IllegalArgumentException("illegal prefixLength '" + prefixLength + "'. Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges"); } @@ -173,7 +174,7 @@ public static Query newPrefixQuery(String field, InetAddress value, int prefixLe upper[i >> 3] |= 1 << (i & 7); } try { - return newRangeQuery(field, InetAddress.getByAddress(lower), true, InetAddress.getByAddress(upper), true); + return newRangeQuery(field, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper)); } catch (UnknownHostException e) { throw new AssertionError(e); // values are coming from InetAddress } @@ -181,31 +182,21 @@ public static Query newPrefixQuery(String field, InetAddress value, int prefixLe /** * Create a range query for network addresses. - *

- * You can have half-open ranges (which are in fact </≤ or >/≥ queries) - * by setting the {@code lowerValue} or {@code upperValue} to {@code null}. - *

- * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will - * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too. * * @param field field name. must not be {@code null}. - * @param lowerValue lower portion of the range. {@code null} means "open". - * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded. - * @param upperValue upper portion of the range. {@code null} means "open". - * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded. - * @throws IllegalArgumentException if {@code field} is null. + * @param lowerValue lower portion of the range (inclusive). must not be null. + * @param upperValue upper portion of the range (inclusive). must not be null. + * @throws IllegalArgumentException if {@code field} is null, {@code lowerValue} is null, + * or {@code upperValue} is null * @return a query matching documents within this range. 
*/ - public static Query newRangeQuery(String field, InetAddress lowerValue, boolean lowerInclusive, InetAddress upperValue, boolean upperInclusive) { + public static Query newRangeQuery(String field, InetAddress lowerValue, InetAddress upperValue) { + PointRangeQuery.checkArgs(field, lowerValue, upperValue); byte[][] lowerBytes = new byte[1][]; - if (lowerValue != null) { - lowerBytes[0] = encode(lowerValue); - } + lowerBytes[0] = encode(lowerValue); byte[][] upperBytes = new byte[1][]; - if (upperValue != null) { - upperBytes[0] = encode(upperValue); - } - return new PointRangeQuery(field, lowerBytes, new boolean[] { lowerInclusive }, upperBytes, new boolean[] { upperInclusive }) { + upperBytes[0] = encode(upperValue); + return new PointRangeQuery(field, lowerBytes, upperBytes) { @Override protected String toString(int dimension, byte[] value) { return decode(value).getHostAddress(); // for ranges, the range itself is already bracketed @@ -217,31 +208,30 @@ protected String toString(int dimension, byte[] value) { * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. * * @param field field name. must not be {@code null}. - * @param valuesIn all values to match + * @param values all values to match */ - public static Query newSetQuery(String field, InetAddress... valuesIn) throws IOException { + public static Query newSetQuery(String field, InetAddress... 
values) { // Don't unexpectedly change the user's incoming values array: - InetAddress[] values = valuesIn.clone(); - - Arrays.sort(values); + InetAddress[] sortedValues = values.clone(); + Arrays.sort(sortedValues); - final BytesRef value = new BytesRef(new byte[BYTES]); + final BytesRef encoded = new BytesRef(new byte[BYTES]); return new PointInSetQuery(field, 1, BYTES, - new BytesRefIterator() { + new PointInSetQuery.Stream() { int upto; @Override public BytesRef next() { - if (upto == values.length) { + if (upto == sortedValues.length) { return null; } else { - value.bytes = encode(values[upto]); - assert value.bytes.length == value.length; + encoded.bytes = encode(sortedValues[upto]); + assert encoded.bytes.length == encoded.length; upto++; - return value; + return encoded; } } }) { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java index b0902f533ad3..aeb0a0f3e380 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java @@ -233,12 +233,17 @@ public static Query newBoxQuery(String field, double minLatitude, double maxLati // E.g.: maxLon = -179, minLon = 179 byte[][] leftOpen = new byte[2][]; leftOpen[0] = lower[0]; - // leave longitude open (null) + // leave longitude open + leftOpen[1] = new byte[Integer.BYTES]; + NumericUtils.intToBytes(Integer.MIN_VALUE, leftOpen[1], 0); Query left = newBoxInternal(field, leftOpen, upper); q.add(new BooleanClause(left, BooleanClause.Occur.SHOULD)); + byte[][] rightOpen = new byte[2][]; rightOpen[0] = upper[0]; - // leave longitude open (null) + // leave longitude open + rightOpen[1] = new byte[Integer.BYTES]; + NumericUtils.intToBytes(Integer.MAX_VALUE, rightOpen[1], 0); Query right = newBoxInternal(field, lower, rightOpen); q.add(new BooleanClause(right, BooleanClause.Occur.SHOULD)); return new ConstantScoreQuery(q.build()); @@ 
-248,7 +253,7 @@ public static Query newBoxQuery(String field, double minLatitude, double maxLati } private static Query newBoxInternal(String field, byte[][] min, byte[][] max) { - return new PointRangeQuery(field, min, new boolean[] { true, true }, max, new boolean[] { false, false }) { + return new PointRangeQuery(field, min, max) { @Override protected String toString(int dimension, byte[] value) { if (dimension == 0) { diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java index f6d407de6d7b..500c2a320612 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java @@ -42,7 +42,7 @@ public void testBasics() throws Exception { IndexReader reader = writer.getReader(); IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(BigIntegerPoint.newExactQuery("field", large))); - assertEquals(1, searcher.count(BigIntegerPoint.newRangeQuery("field", large.subtract(BigInteger.ONE), false, large.add(BigInteger.ONE), false))); + assertEquals(1, searcher.count(BigIntegerPoint.newRangeQuery("field", large.subtract(BigInteger.ONE), large.add(BigInteger.ONE)))); assertEquals(1, searcher.count(BigIntegerPoint.newSetQuery("field", large))); assertEquals(0, searcher.count(BigIntegerPoint.newSetQuery("field", large.subtract(BigInteger.ONE)))); assertEquals(0, searcher.count(BigIntegerPoint.newSetQuery("field"))); @@ -67,7 +67,7 @@ public void testNegative() throws Exception { IndexReader reader = writer.getReader(); IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(BigIntegerPoint.newExactQuery("field", negative))); - assertEquals(1, searcher.count(BigIntegerPoint.newRangeQuery("field", negative.subtract(BigInteger.ONE), false, negative.add(BigInteger.ONE), false))); + assertEquals(1, 
searcher.count(BigIntegerPoint.newRangeQuery("field", negative.subtract(BigInteger.ONE), negative.add(BigInteger.ONE)))); reader.close(); writer.close(); @@ -87,12 +87,10 @@ public void testToString() throws Exception { assertEquals("BigIntegerPoint ", new BigIntegerPoint("field", BigInteger.ONE).toString()); assertEquals("BigIntegerPoint ", new BigIntegerPoint("field", BigInteger.ONE, BigInteger.valueOf(-2)).toString()); assertEquals("field:[1 TO 1]", BigIntegerPoint.newExactQuery("field", BigInteger.ONE).toString()); - assertEquals("field:{1 TO 17]", BigIntegerPoint.newRangeQuery("field", BigInteger.ONE, false, BigInteger.valueOf(17), true).toString()); - assertEquals("field:{1 TO 17],[0 TO 42}", BigIntegerPoint.newMultiRangeQuery("field", + assertEquals("field:[1 TO 17]", BigIntegerPoint.newRangeQuery("field", BigInteger.ONE, BigInteger.valueOf(17)).toString()); + assertEquals("field:[1 TO 17],[0 TO 42]", BigIntegerPoint.newRangeQuery("field", new BigInteger[] {BigInteger.ONE, BigInteger.ZERO}, - new boolean[] {false, true}, - new BigInteger[] {BigInteger.valueOf(17), BigInteger.valueOf(42)}, - new boolean[] {true, false}).toString()); + new BigInteger[] {BigInteger.valueOf(17), BigInteger.valueOf(42)}).toString()); assertEquals("field:{1}", BigIntegerPoint.newSetQuery("field", BigInteger.ONE).toString()); } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java index 9854001a2d02..d4ddb3adbfde 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java @@ -43,7 +43,7 @@ public void testBasics() throws Exception { IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(InetAddressPoint.newExactQuery("field", address))); assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 24))); - 
assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("1.2.3.3"), false, InetAddress.getByName("1.2.3.5"), false))); + assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")))); assertEquals(1, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.4")))); assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.3")))); assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field"))); @@ -69,7 +69,7 @@ public void testBasicsV6() throws Exception { IndexSearcher searcher = newSearcher(reader, false); assertEquals(1, searcher.count(InetAddressPoint.newExactQuery("field", address))); assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 64))); - assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("fec0::f66c"), false, InetAddress.getByName("fec0::f66e"), false))); + assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("fec0::f66c"), InetAddress.getByName("fec0::f66e")))); reader.close(); writer.close(); diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java index 1d3bfac30525..0ef948d3f6dd 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java @@ -51,7 +51,7 @@ public void testToString() throws Exception { assertEquals("LatLonPoint ",(new LatLonPoint("field", 18.313694, -65.227444)).toString()); // looks crazy due to lossiness - assertEquals("field:[17.99999997485429 TO 18.999999999068677},[-65.9999999217689 TO -64.99999998137355}", LatLonPoint.newBoxQuery("field", 18, 19, -66, -65).toString()); + assertEquals("field:[17.99999997485429 TO 
18.999999999068677],[-65.9999999217689 TO -64.99999998137355]", LatLonPoint.newBoxQuery("field", 18, 19, -66, -65).toString()); // distance query does not quantize inputs assertEquals("field:18.0,19.0 +/- 25.0 meters", LatLonPoint.newDistanceQuery("field", 18, 19, 25).toString()); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java index 47cd740d1d3c..f2ec17e96a79 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java @@ -56,7 +56,7 @@ public void testDuelNumericRangeQuery() throws IOException { iw.addDocument(doc); } if (random().nextBoolean()) { - iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, true, 10L, true)); + iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L)); } iw.commit(); final IndexReader reader = iw.getReader(); @@ -64,12 +64,10 @@ public void testDuelNumericRangeQuery() throws IOException { iw.close(); for (int i = 0; i < 100; ++i) { - final Long min = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000); - final Long max = random().nextBoolean() ? 
null : TestUtil.nextLong(random(), -100, 1000); - final boolean minInclusive = random().nextBoolean(); - final boolean maxInclusive = random().nextBoolean(); - final Query q1 = LongPoint.newRangeQuery("idx", min, minInclusive, max, maxInclusive); - final Query q2 = DocValuesRangeQuery.newLongRange("dv", min, max, minInclusive, maxInclusive); + final Long min = TestUtil.nextLong(random(), -100, 1000); + final Long max = TestUtil.nextLong(random(), -100, 1000); + final Query q1 = LongPoint.newRangeQuery("idx", min, max); + final Query q2 = DocValuesRangeQuery.newLongRange("dv", min, max, true, true); assertSameMatches(searcher, q1, q2, false); } @@ -184,7 +182,7 @@ public void testApproximation() throws IOException { iw.addDocument(doc); } if (random().nextBoolean()) { - iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, true, 10L, true)); + iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L)); } iw.commit(); final IndexReader reader = iw.getReader(); @@ -192,23 +190,21 @@ public void testApproximation() throws IOException { iw.close(); for (int i = 0; i < 100; ++i) { - final Long min = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000); - final Long max = random().nextBoolean() ? 
null : TestUtil.nextLong(random(), -100, 1000); - final boolean minInclusive = random().nextBoolean(); - final boolean maxInclusive = random().nextBoolean(); + final Long min = TestUtil.nextLong(random(), -100, 1000); + final Long max = TestUtil.nextLong(random(), -100, 1000); BooleanQuery.Builder ref = new BooleanQuery.Builder(); - ref.add(LongPoint.newRangeQuery("idx", min, minInclusive, max, maxInclusive), Occur.FILTER); + ref.add(LongPoint.newRangeQuery("idx", min, max), Occur.FILTER); ref.add(new TermQuery(new Term("f", "a")), Occur.MUST); BooleanQuery.Builder bq1 = new BooleanQuery.Builder(); - bq1.add(DocValuesRangeQuery.newLongRange("dv1", min, max, minInclusive, maxInclusive), Occur.FILTER); + bq1.add(DocValuesRangeQuery.newLongRange("dv1", min, max, true, true), Occur.FILTER); bq1.add(new TermQuery(new Term("f", "a")), Occur.MUST); assertSameMatches(searcher, ref.build(), bq1.build(), true); BooleanQuery.Builder bq2 = new BooleanQuery.Builder(); - bq2.add(DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), minInclusive, maxInclusive), Occur.FILTER); + bq2.add(DocValuesRangeQuery.newBytesRefRange("dv2", toSortableBytes(min), toSortableBytes(max), true, true), Occur.FILTER); bq2.add(new TermQuery(new Term("f", "a")), Occur.MUST); assertSameMatches(searcher, ref.build(), bq2.build(), true); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java index 8eb1be0f7e33..9f9bba2c2362 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java @@ -66,15 +66,15 @@ protected Boolean rectContainsPoint(GeoRect rect, double pointLat, double pointL if (rect.minLon < rect.maxLon) { return pointLatEnc >= rectLatMinEnc && - pointLatEnc < rectLatMaxEnc && + pointLatEnc <= rectLatMaxEnc && pointLonEnc >= rectLonMinEnc 
&& - pointLonEnc < rectLonMaxEnc; + pointLonEnc <= rectLonMaxEnc; } else { // Rect crosses dateline: return pointLatEnc >= rectLatMinEnc && - pointLatEnc < rectLatMaxEnc && + pointLatEnc <= rectLatMaxEnc && (pointLonEnc >= rectLonMinEnc || - pointLonEnc < rectLonMaxEnc); + pointLonEnc <= rectLonMaxEnc); } } diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java b/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java index 8313616f3d2b..ac636ec97be2 100644 --- a/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java +++ b/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java @@ -519,6 +519,9 @@ public void collect(int doc) { boolean fail = false; + // Change to false to see all wrong hits: + boolean failFast = true; + for(int docID=0;docID Date: Wed, 2 Mar 2016 21:05:32 -0500 Subject: [PATCH 0007/1113] LUCENE-7060: Spatial4j 0.6 upgrade. Package com.spatial4j.core -> org.locationtech.spatial4j (cherry picked from commit 569b6ca) --- lucene/CHANGES.txt | 3 + lucene/benchmark/conf/spatial.alg | 2 +- lucene/benchmark/ivy.xml | 2 +- .../byTask/feeds/SpatialDocMaker.java | 8 +- .../byTask/feeds/SpatialFileQueryMaker.java | 4 +- lucene/ivy-versions.properties | 3 +- lucene/licenses/spatial4j-0.5-tests.jar.sha1 | 1 - lucene/licenses/spatial4j-0.5.jar.sha1 | 1 - lucene/licenses/spatial4j-0.6-tests.jar.sha1 | 1 + lucene/licenses/spatial4j-0.6.jar.sha1 | 1 + lucene/licenses/spatial4j-NOTICE.txt | 136 +++++++++++++++++- lucene/spatial-extras/ivy.xml | 4 +- .../lucene/spatial/SpatialStrategy.java | 12 +- .../bbox/BBoxOverlapRatioValueSource.java | 2 +- .../bbox/BBoxSimilarityValueSource.java | 4 +- .../lucene/spatial/bbox/BBoxStrategy.java | 10 +- .../lucene/spatial/bbox/BBoxValueSource.java | 2 +- .../composite/CompositeSpatialStrategy.java | 4 +- .../composite/IntersectsRPTVerifyQuery.java | 4 +- .../prefix/AbstractPrefixTreeQuery.java | 2 +- 
.../AbstractVisitingPrefixTreeQuery.java | 4 +- .../prefix/ContainsPrefixTreeQuery.java | 4 +- .../spatial/prefix/HeatmapFacetCounter.java | 10 +- .../prefix/IntersectsPrefixTreeQuery.java | 4 +- .../prefix/NumberRangePrefixTreeStrategy.java | 4 +- .../PointPrefixTreeFieldCacheProvider.java | 2 +- .../prefix/PrefixTreeFacetCounter.java | 6 +- .../spatial/prefix/PrefixTreeStrategy.java | 10 +- .../prefix/RecursivePrefixTreeStrategy.java | 4 +- .../prefix/TermQueryPrefixTreeStrategy.java | 6 +- .../spatial/prefix/WithinPrefixTreeQuery.java | 16 +-- .../lucene/spatial/prefix/tree/Cell.java | 6 +- .../prefix/tree/DateRangePrefixTree.java | 2 +- .../prefix/tree/FilterCellIterator.java | 4 +- .../prefix/tree/GeohashPrefixTree.java | 10 +- .../spatial/prefix/tree/LegacyCell.java | 6 +- .../spatial/prefix/tree/LegacyPrefixTree.java | 8 +- .../prefix/tree/NumberRangePrefixTree.java | 16 +-- .../prefix/tree/PackedQuadPrefixTree.java | 12 +- .../spatial/prefix/tree/QuadPrefixTree.java | 10 +- .../prefix/tree/SpatialPrefixTree.java | 4 +- .../prefix/tree/SpatialPrefixTreeFactory.java | 4 +- .../spatial/prefix/tree/TreeCellIterator.java | 4 +- .../lucene/spatial/query/SpatialArgs.java | 12 +- .../spatial/query/SpatialArgsParser.java | 10 +- .../spatial/query/SpatialOperation.java | 8 +- .../serialized/SerializedDVStrategy.java | 10 +- .../lucene/spatial/spatial4j/Geo3dShape.java | 14 +- .../util/DistanceToShapeValueSource.java | 10 +- .../spatial/util/ShapeAreaValueSource.java | 6 +- .../lucene/spatial/util/ShapeFieldCache.java | 2 +- .../ShapeFieldCacheDistanceValueSource.java | 6 +- .../spatial/util/ShapeFieldCacheProvider.java | 2 +- .../util/ShapePredicateValueSource.java | 2 +- .../spatial/vector/DistanceValueSource.java | 4 +- .../spatial/vector/PointVectorStrategy.java | 16 +-- .../lucene/spatial/DistanceStrategyTest.java | 6 +- .../lucene/spatial/PortedSolr3Test.java | 8 +- .../spatial/QueryEqualsHashCodeTest.java | 4 +- .../lucene/spatial/SpatialArgsTest.java | 4 +- 
.../apache/lucene/spatial/SpatialExample.java | 8 +- .../lucene/spatial/SpatialTestCase.java | 8 +- .../lucene/spatial/SpatialTestData.java | 4 +- .../lucene/spatial/SpatialTestQuery.java | 2 +- .../lucene/spatial/StrategyTestCase.java | 4 +- .../lucene/spatial/TestTestFramework.java | 4 +- .../lucene/spatial/bbox/TestBBoxStrategy.java | 12 +- .../composite/CompositeStrategyTest.java | 12 +- .../spatial/prefix/DateNRStrategyTest.java | 2 +- .../prefix/HeatmapFacetCounterTest.java | 18 +-- .../lucene/spatial/prefix/JtsPolygonTest.java | 8 +- .../spatial/prefix/NumberRangeFacetsTest.java | 2 +- .../RandomSpatialOpFuzzyPrefixTreeTest.java | 24 ++-- .../RandomSpatialOpStrategyTestCase.java | 2 +- .../TestRecursivePrefixTreeStrategy.java | 8 +- .../TestTermQueryPrefixGridStrategy.java | 4 +- .../prefix/tree/DateRangePrefixTreeTest.java | 4 +- .../prefix/tree/SpatialPrefixTreeTest.java | 8 +- .../spatial/query/SpatialArgsParserTest.java | 4 +- .../serialized/SerializedStrategyTest.java | 2 +- .../spatial/spatial4j/Geo3dRptTest.java | 10 +- .../Geo3dShapeRectRelationTestCase.java | 18 +-- ...Geo3dShapeSphereModelRectRelationTest.java | 2 +- .../spatial4j/RandomizedShapeTestCase.java | 22 +-- .../spatial/spatial4j/geo3d/GeoPointTest.java | 2 +- .../vector/TestPointVectorStrategy.java | 6 +- solr/core/ivy.xml | 2 +- .../component/SpatialHeatmapFacets.java | 4 +- .../solr/schema/AbstractSpatialFieldType.java | 12 +- .../AbstractSpatialPrefixTreeFieldType.java | 2 +- .../org/apache/solr/schema/BBoxField.java | 2 +- .../apache/solr/schema/DateRangeField.java | 2 +- .../org/apache/solr/schema/GeoHashField.java | 8 +- .../org/apache/solr/schema/LatLonType.java | 8 +- .../org/apache/solr/schema/PointType.java | 2 +- .../schema/RptWithGeometrySpatialField.java | 6 +- .../apache/solr/search/ValueSourceParser.java | 2 +- .../distance/GeoDistValueSourceParser.java | 8 +- .../function/distance/GeohashFunction.java | 2 +- .../distance/GeohashHaversineFunction.java | 10 +- 
.../distance/HaversineConstFunction.java | 4 +- .../function/distance/HaversineFunction.java | 2 +- .../org/apache/solr/util/DistanceUnits.java | 2 +- .../org/apache/solr/util/SpatialUtils.java | 18 +-- .../apache/solr/search/TestSolr4Spatial.java | 8 +- .../distance/DistanceFunctionTest.java | 4 +- .../apache/solr/util/DistanceUnitsTest.java | 2 +- solr/licenses/spatial4j-0.5.jar.sha1 | 1 - solr/licenses/spatial4j-0.6.jar.sha1 | 1 + solr/licenses/spatial4j-NOTICE.txt | 136 +++++++++++++++++- 110 files changed, 595 insertions(+), 339 deletions(-) delete mode 100644 lucene/licenses/spatial4j-0.5-tests.jar.sha1 delete mode 100644 lucene/licenses/spatial4j-0.5.jar.sha1 create mode 100644 lucene/licenses/spatial4j-0.6-tests.jar.sha1 create mode 100644 lucene/licenses/spatial4j-0.6.jar.sha1 delete mode 100644 solr/licenses/spatial4j-0.5.jar.sha1 create mode 100644 solr/licenses/spatial4j-0.6.jar.sha1 diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index c0925e8ab640..dcba07f7fd6e 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -110,6 +110,9 @@ API Changes comparator in ArrayUtil by Java 8's Comparator#naturalOrder(). (Mike McCandless, Uwe Schindler, Robert Muir) +* LUCENE-7060: Update Spatial4j to 0.6. The package com.spatial4j.core + is now org.locationtech.spatial4j. 
(David Smiley) + Optimizations * LUCENE-6891: Use prefix coding when writing points in diff --git a/lucene/benchmark/conf/spatial.alg b/lucene/benchmark/conf/spatial.alg index 93f10efc7743..0ee637f510f9 100644 --- a/lucene/benchmark/conf/spatial.alg +++ b/lucene/benchmark/conf/spatial.alg @@ -23,7 +23,7 @@ ### Spatial Context, Grid, Strategy config doc.maker=org.apache.lucene.benchmark.byTask.feeds.SpatialDocMaker # SpatialContext: see SpatialContextFactory.makeSpatialContext -#spatial.spatialContextFactory=com.spatial4j.core.context.jts.JtsSpatialContextFactory +#spatial.spatialContextFactory=org.locationtech.spatial4j.context.jts.JtsSpatialContextFactory #spatial.geo=true #spatial.distCalculator=haversine #spatial.worldBounds=... diff --git a/lucene/benchmark/ivy.xml b/lucene/benchmark/ivy.xml index ec02b6ab4ff3..23c208cb3ed2 100644 --- a/lucene/benchmark/ivy.xml +++ b/lucene/benchmark/ivy.xml @@ -26,7 +26,7 @@ - + diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java index b7f04d1da3b4..b466e964cc9d 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java @@ -23,10 +23,10 @@ import java.util.Random; import java.util.Set; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.benchmark.byTask.utils.Config; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; diff --git 
a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialFileQueryMaker.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialFileQueryMaker.java index c99de3db071f..b6b8f50b10ef 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialFileQueryMaker.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialFileQueryMaker.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.Properties; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.benchmark.byTask.utils.Config; import org.apache.lucene.queries.function.FunctionQuery; import org.apache.lucene.queries.function.ValueSource; @@ -34,7 +34,7 @@ /** * Reads spatial data from the body field docs from an internally created {@link LineDocSource}. - * It's parsed by {@link com.spatial4j.core.context.SpatialContext#readShapeFromWkt(String)} (String)} and then + * It's parsed by {@link org.locationtech.spatial4j.context.SpatialContext#readShapeFromWkt(String)} (String)} and then * further manipulated via a configurable {@link SpatialDocMaker.ShapeConverter}. When using point * data, it's likely you'll want to configure the shape converter so that the query shapes actually * cover a region. The queries are all created and cached in advance. 
This query maker works in diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties index 2d630a2a6a64..d5ef256337d7 100644 --- a/lucene/ivy-versions.properties +++ b/lucene/ivy-versions.properties @@ -41,7 +41,6 @@ com.google.inject.guice.version = 3.0 /com.googlecode.mp4parser/isoparser = 1.0.2 /com.ibm.icu/icu4j = 56.1 /com.pff/java-libpst = 0.8.1 -/com.spatial4j/spatial4j = 0.5 com.sun.jersey.version = 1.9 /com.sun.jersey.contribs/jersey-guice = ${com.sun.jersey.version} @@ -268,6 +267,8 @@ org.kitesdk.kite-morphlines.version = 1.1.0 /org.kitesdk/kite-morphlines-tika-decompress = ${org.kitesdk.kite-morphlines.version} /org.kitesdk/kite-morphlines-twitter = ${org.kitesdk.kite-morphlines.version} +/org.locationtech.spatial4j/spatial4j = 0.6 + /org.mockito/mockito-core = 1.9.5 org.mortbay.jetty.version = 6.1.26 diff --git a/lucene/licenses/spatial4j-0.5-tests.jar.sha1 b/lucene/licenses/spatial4j-0.5-tests.jar.sha1 deleted file mode 100644 index 0c514f8835ef..000000000000 --- a/lucene/licenses/spatial4j-0.5-tests.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdcdf20a723516a233b5bcc0ca7d4decaa88b6ed diff --git a/lucene/licenses/spatial4j-0.5.jar.sha1 b/lucene/licenses/spatial4j-0.5.jar.sha1 deleted file mode 100644 index c81a76cbe91b..000000000000 --- a/lucene/licenses/spatial4j-0.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3 diff --git a/lucene/licenses/spatial4j-0.6-tests.jar.sha1 b/lucene/licenses/spatial4j-0.6-tests.jar.sha1 new file mode 100644 index 000000000000..0fd8404f2384 --- /dev/null +++ b/lucene/licenses/spatial4j-0.6-tests.jar.sha1 @@ -0,0 +1 @@ +0624ae8b9e43265822e0d79b481e34917fec1eba diff --git a/lucene/licenses/spatial4j-0.6.jar.sha1 b/lucene/licenses/spatial4j-0.6.jar.sha1 new file mode 100644 index 000000000000..56c02ad3016c --- /dev/null +++ b/lucene/licenses/spatial4j-0.6.jar.sha1 @@ -0,0 +1 @@ +21b15310bddcfd8c72611c180f20cf23279809a3 diff --git a/lucene/licenses/spatial4j-NOTICE.txt 
b/lucene/licenses/spatial4j-NOTICE.txt index d7d48d185c69..779b8df7e911 100644 --- a/lucene/licenses/spatial4j-NOTICE.txt +++ b/lucene/licenses/spatial4j-NOTICE.txt @@ -1,5 +1,133 @@ -Spatial4j -Copyright 2012-2014 The Apache Software Foundation +# about.md file -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). +## About This Content + +May 22, 2015 + +### License + +The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the +Content is provided to you under the terms and conditions of the Apache License, Version 2.0. A copy of the Apache +License, Version 2.0 is available at +[http://www.apache.org/licenses/LICENSE-2.0.txt](http://www.apache.org/licenses/LICENSE-2.0.txt) + +If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another +party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content. +Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the +Redistributor. Unless otherwise indicated below, the terms and conditions of the Apache License, Version 2.0 still apply +to any source code in the Content and such source code may be obtained at +[http://www.eclipse.org](http://www.eclipse.org). + +# notice.md file + +Note: the below Eclipse user agreement is standard. It says "Unless otherwise indicated, "... before referring to the +EPL. We indicate above that all content is licensed under the ASLv2 license. -- David Smiley + +## Eclipse Foundation Software User Agreement + +April 9, 2014 + +### Usage Of Content + +THE ECLIPSE FOUNDATION MAKES AVAILABLE SOFTWARE, DOCUMENTATION, INFORMATION AND/OR OTHER MATERIALS FOR OPEN SOURCE +PROJECTS (COLLECTIVELY "CONTENT"). 
USE OF THE CONTENT IS GOVERNED BY THE TERMS AND CONDITIONS OF THIS AGREEMENT AND/OR +THE TERMS AND CONDITIONS OF LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. BY USING THE CONTENT, YOU AGREE +THAT YOUR USE OF THE CONTENT IS GOVERNED BY THIS AGREEMENT AND/OR THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE +AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. IF YOU DO NOT AGREE TO THE TERMS AND CONDITIONS OF THIS AGREEMENT +AND THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW, THEN YOU MAY +NOT USE THE CONTENT. + +### Applicable Licenses + +Unless otherwise indicated, all Content made available by the Eclipse Foundation is provided to you under the terms and +conditions of the Eclipse Public License Version 1.0 ("EPL"). A copy of the EPL is provided with this Content and is +also available at [http://www.eclipse.org/legal/epl-v10.html](http://www.eclipse.org/legal/epl-v10.html). For purposes +of the EPL, "Program" will mean the Content. + +Content includes, but is not limited to, source code, object code, documentation and other files maintained in the +Eclipse Foundation source code repository ("Repository") in software modules ("Modules") and made available as +downloadable archives ("Downloads"). + +* Content may be structured and packaged into modules to facilitate delivering, extending, and upgrading the Content. + Typical modules may include plug-ins ("Plug-ins"), plug-in fragments ("Fragments"), and features ("Features"). +* Each Plug-in or Fragment may be packaged as a sub-directory or JAR (Java™ ARchive) in a directory named "plugins". +* A Feature is a bundle of one or more Plug-ins and/or Fragments and associated material. Each Feature may be packaged + as a sub-directory in a directory named "features". Within a Feature, files named "feature.xml" may contain a list + of the names and version numbers of the Plug-ins and/or Fragments associated with that Feature. 
+* Features may also include other Features ("Included Features"). Within a Feature, files named "feature.xml" may + contain a list of the names and version numbers of Included Features. + +The terms and conditions governing Plug-ins and Fragments should be contained in files named "about.html" ("Abouts"). +The terms and conditions governing Features and Included Features should be contained in files named "license.html" +("Feature Licenses"). Abouts and Feature Licenses may be located in any directory of a Download or Module including, but +not limited to the following locations: + +* The top-level (root) directory +* Plug-in and Fragment directories +* Inside Plug-ins and Fragments packaged as JARs +* Sub-directories of the directory named "src" of certain Plug-ins +* Feature directories + +Note: if a Feature made available by the Eclipse Foundation is installed using the Provisioning Technology (as defined +below), you must agree to a license ("Feature Update License") during the installation process. If the Feature contains +Included Features, the Feature Update License should either provide you with the terms and conditions governing the +Included Features or inform you where you can locate them. Feature Update Licenses may be found in the "license" +property of files named "feature.properties" found within a Feature. Such Abouts, Feature Licenses, and Feature Update +Licenses contain the terms and conditions (or references to such terms and conditions) that govern your use of the +associated Content in that directory. + +THE ABOUTS, FEATURE LICENSES, AND FEATURE UPDATE LICENSES MAY REFER TO THE EPL OR OTHER LICENSE AGREEMENTS, NOTICES OR +TERMS AND CONDITIONS. 
SOME OF THESE OTHER LICENSE AGREEMENTS MAY INCLUDE (BUT ARE NOT LIMITED TO): + +* Eclipse Distribution License Version 1.0 (available at + [http://www.eclipse.org/licenses/edl-v1.0.html](http://www.eclipse.org/licenses/edl-v10.html)) +* Common Public License Version 1.0 (available at + [http://www.eclipse.org/legal/cpl-v10.html](http://www.eclipse.org/legal/cpl-v10.html)) +* Apache Software License 1.1 (available at + [http://www.apache.org/licenses/LICENSE](http://www.apache.org/licenses/LICENSE)) +* Apache Software License 2.0 (available at + [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)) +* Mozilla Public License Version 1.1 (available at + [http://www.mozilla.org/MPL/MPL-1.1.html](http://www.mozilla.org/MPL/MPL-1.1.html)) + +IT IS YOUR OBLIGATION TO READ AND ACCEPT ALL SUCH TERMS AND CONDITIONS PRIOR TO USE OF THE CONTENT. If no About, Feature +License, or Feature Update License is provided, please contact the Eclipse Foundation to determine what terms and +conditions govern that particular Content. + +### Use of Provisioning Technology + +The Eclipse Foundation makes available provisioning software, examples of which include, but are not limited to, p2 and +the Eclipse Update Manager ("Provisioning Technology") for the purpose of allowing users to install software, +documentation, information and/or other materials (collectively "Installable Software"). This capability is provided +with the intent of allowing such users to install, extend and update Eclipse-based products. Information about packaging +Installable Software is available at +[http://eclipse.org/equinox/p2/repository_packaging.html](http://eclipse.org/equinox/p2/repository_packaging.html) +("Specification"). + +You may use Provisioning Technology to allow other parties to install Installable Software. 
You shall be responsible for +enabling the applicable license agreements relating to the Installable Software to be presented to, and accepted by, the +users of the Provisioning Technology in accordance with the Specification. By using Provisioning Technology in such a +manner and making it available in accordance with the Specification, you further acknowledge your agreement to, and the +acquisition of all necessary rights to permit the following: + +1. A series of actions may occur ("Provisioning Process") in which a user may execute the Provisioning Technology on a + machine ("Target Machine") with the intent of installing, extending or updating the functionality of an + Eclipse-based product. +2. During the Provisioning Process, the Provisioning Technology may cause third party Installable Software or a portion + thereof to be accessed and copied to the Target Machine. +3. Pursuant to the Specification, you will provide to the user the terms and conditions that govern the use of the + Installable Software ("Installable Software Agreement") and such Installable Software Agreement shall be accessed + from the Target Machine in accordance with the Specification. Such Installable Software Agreement must inform the + user of the terms and conditions that govern the Installable Software and must solicit acceptance by the end user in + the manner prescribed in such Installable Software Agreement. Upon such indication of agreement by the user, the + provisioning Technology will complete installation of the Installable Software. + +### Cryptography + +Content may contain encryption software. The country in which you are currently may have restrictions on the import, +possession, and use, and/or re-export to another country, of encryption software. BEFORE using any encryption software, +please check the country's laws, regulations and policies concerning the import, possession, or use, and re-export of +encryption software, to see if this is permitted. 
+ +Java and all Java-based trademarks are trademarks of Oracle Corporation in the United States, other countries, +or both. \ No newline at end of file diff --git a/lucene/spatial-extras/ivy.xml b/lucene/spatial-extras/ivy.xml index 4fef30ec538a..448776f0b3aa 100644 --- a/lucene/spatial-extras/ivy.xml +++ b/lucene/spatial-extras/ivy.xml @@ -23,9 +23,9 @@ - + - + diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java index f433c111e0c3..d980ba9b302c 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/SpatialStrategy.java @@ -16,10 +16,10 @@ */ package org.apache.lucene.spatial; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Field; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.ReciprocalFloatFunction; @@ -100,7 +100,7 @@ public String getFieldName() { public abstract Field[] createIndexableFields(Shape shape); /** - * See {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point, double)} called with + * See {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point, double)} called with * a multiplier of 1.0 (i.e. units of degrees). 
*/ public ValueSource makeDistanceValueSource(Point queryPoint) { @@ -127,7 +127,7 @@ public ValueSource makeDistanceValueSource(Point queryPoint) { /** * Returns a ValueSource with values ranging from 1 to 0, depending inversely - * on the distance from {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point,double)}. + * on the distance from {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point,double)}. * The formula is {@code c/(d + c)} where 'd' is the distance and 'c' is * one tenth the distance to the farthest edge from the center. Thus the * scores will be 1 for indexed points at the center of the query shape and as diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxOverlapRatioValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxOverlapRatioValueSource.java index 9d0afe13dec5..101f373c3507 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxOverlapRatioValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxOverlapRatioValueSource.java @@ -21,7 +21,7 @@ import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Explanation; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; /** * The algorithm is implemented as envelope on envelope (rect on rect) overlays rather than diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxSimilarityValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxSimilarityValueSource.java index 15cd646173d6..1d8b4b0c6f96 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxSimilarityValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxSimilarityValueSource.java @@ -27,12 +27,12 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; -import 
com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; /** * A base class for calculating a spatial relevance rank per document from a provided * {@link ValueSource} in which {@link FunctionValues#objectVal(int)} returns a {@link - * com.spatial4j.core.shape.Rectangle}. + * org.locationtech.spatial4j.shape.Rectangle}. *

* Implementers: remember to implement equals and hashCode if you have * fields! diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java index 9565cbf21e40..5cc6788a63b5 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java @@ -16,10 +16,10 @@ */ package org.apache.lucene.spatial.bbox; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.LegacyDoubleField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -64,7 +64,7 @@ * and a boolean to mark a dateline cross. Depending on the particular {@link * SpatialOperation}s, there are a variety of {@link org.apache.lucene.search.LegacyNumericRangeQuery}s to be * done. - * The {@link #makeOverlapRatioValueSource(com.spatial4j.core.shape.Rectangle, double)} + * The {@link #makeOverlapRatioValueSource(org.locationtech.spatial4j.shape.Rectangle, double)} * works by calculating the query bbox overlap percentage against the indexed * shape overlap percentage. The indexed shape's coordinates are retrieved from * {@link org.apache.lucene.index.LeafReader#getNumericDocValues}. 
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java index 5d954074d325..2bfbfd933a32 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxValueSource.java @@ -16,7 +16,7 @@ */ package org.apache.lucene.spatial.bbox; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.DocValues; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeSpatialStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeSpatialStrategy.java index 7dc2dfaa05a6..de5bb612d415 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeSpatialStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/CompositeSpatialStrategy.java @@ -20,8 +20,8 @@ import java.util.Collections; import java.util.List; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Field; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Query; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java index a963b6eac6b1..f60bfeeaa131 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/composite/IntersectsRPTVerifyQuery.java @@ -19,8 +19,8 
@@ import java.io.IOException; import java.util.Map; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java index 127e68915884..bcf486777ed4 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeQuery.java @@ -18,7 +18,7 @@ import java.io.IOException; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeQuery.java index 2237ca9dbf0e..8ccee99c40c1 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeQuery.java @@ -19,8 +19,8 @@ import java.io.IOException; import java.util.Iterator; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSet; diff --git 
a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java index 00463784ebc0..b0864f67cd35 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java @@ -19,8 +19,8 @@ import java.io.IOException; import java.util.Arrays; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java index c6700cdef596..adee2be9cecc 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java @@ -20,11 +20,11 @@ import java.util.HashMap; import java.util.Map; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.CellIterator; diff --git 
a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeQuery.java index ccb0f89ba177..89129a1c3fdc 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeQuery.java @@ -18,8 +18,8 @@ import java.io.IOException; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.spatial.prefix.tree.Cell; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeStrategy.java index 8001c82afc2d..c727c0da0753 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeStrategy.java @@ -23,8 +23,8 @@ import java.util.SortedMap; import java.util.TreeMap; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.spatial.prefix.tree.Cell; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PointPrefixTreeFieldCacheProvider.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PointPrefixTreeFieldCacheProvider.java index 165c41885443..f44ca4474091 100644 --- 
a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PointPrefixTreeFieldCacheProvider.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PointPrefixTreeFieldCacheProvider.java @@ -16,7 +16,7 @@ */ package org.apache.lucene.spatial.prefix; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; import org.apache.lucene.spatial.util.ShapeFieldCacheProvider; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java index 173c30eb8530..b3b82db35c68 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java @@ -18,7 +18,7 @@ import java.io.IOException; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; @@ -37,9 +37,9 @@ * of double-counting the document in the facet results. Since each shape is independently turned into grid cells at * a resolution chosen by the shape's size, it's possible they will be indexed at different resolutions. This means * the document could be present in BOTH the postings for a cell in both its prefix and leaf variants. To avoid this, - * use a single valued field with a {@link com.spatial4j.core.shape.ShapeCollection} (or WKT equivalent). Or + * use a single valued field with a {@link org.locationtech.spatial4j.shape.ShapeCollection} (or WKT equivalent). 
Or * calculate a suitable level/distErr to index both and call - * {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy#createIndexableFields(com.spatial4j.core.shape.Shape, int)} + * {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy#createIndexableFields(org.locationtech.spatial4j.shape.Shape, int)} * with the same value for all shapes for a given document/field. * * @lucene.experimental diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java index 608879be4ee7..e9f43fd43bd8 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; @@ -47,7 +47,7 @@ *

  • Can index any shape; however only {@link RecursivePrefixTreeStrategy} * can effectively search non-point shapes.
  • *
  • Can index a variable number of shapes per field value. This strategy - * can do it via multiple calls to {@link #createIndexableFields(com.spatial4j.core.shape.Shape)} + * can do it via multiple calls to {@link #createIndexableFields(org.locationtech.spatial4j.shape.Shape)} * for a document or by giving it some sort of Shape aggregate (e.g. JTS * WKT MultiPoint). The shape's boundary is approximated to a grid precision. *
  • @@ -56,7 +56,7 @@ *
  • Only {@link org.apache.lucene.spatial.query.SpatialOperation#Intersects} * is supported. If only points are indexed then this is effectively equivalent * to IsWithin.
  • - *
  • The strategy supports {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point,double)} + *
  • The strategy supports {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point,double)} * even for multi-valued data, so long as the indexed data is all points; the * behavior is undefined otherwise. However, it will likely be removed in * the future in lieu of using another strategy with a more scalable @@ -93,7 +93,7 @@ public SpatialPrefixTree getGrid() { } /** - * A memory hint used by {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point)} + * A memory hint used by {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point)} * for how big the initial size of each Document's array should be. The * default is 2. Set this to slightly more than the default expected number * of points per document. diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java index 68b04499d9cf..d3d16263b9c8 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java @@ -20,8 +20,8 @@ import java.util.Iterator; import java.util.List; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.CellIterator; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/TermQueryPrefixTreeStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/TermQueryPrefixTreeStrategy.java index a74786bc7e5b..0273466aa2b6 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/TermQueryPrefixTreeStrategy.java +++ 
b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/TermQueryPrefixTreeStrategy.java @@ -19,8 +19,8 @@ import java.util.ArrayList; import java.util.List; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.prefix.tree.Cell; @@ -35,7 +35,7 @@ /** * A basic implementation of {@link PrefixTreeStrategy} using a large * {@link TermsQuery} of all the cells from - * {@link SpatialPrefixTree#getTreeCellIterator(com.spatial4j.core.shape.Shape, int)}. + * {@link SpatialPrefixTree#getTreeCellIterator(org.locationtech.spatial4j.shape.Shape, int)}. * It only supports the search of indexed Point shapes. *

    * The precision of query shapes (distErrPct) is an important factor in using diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java index cf0d11bd0549..f595f0e4639b 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeQuery.java @@ -18,13 +18,13 @@ import java.io.IOException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.spatial.prefix.tree.Cell; @@ -58,7 +58,7 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery { private final Shape bufferedQueryShape;//if null then the whole world /** - * See {@link AbstractVisitingPrefixTreeQuery#AbstractVisitingPrefixTreeQuery(com.spatial4j.core.shape.Shape, String, org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree, int, int)}. + * See {@link AbstractVisitingPrefixTreeQuery#AbstractVisitingPrefixTreeQuery(org.locationtech.spatial4j.shape.Shape, String, org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree, int, int)}. 
* {@code queryBuffer} is the (minimum) distance beyond the query shape edge * where non-matching documents are looked for so they can be excluded. If * -1 is used then the whole world is examined (a good default for correctness). diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/Cell.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/Cell.java index fe3846d1605f..f4bc45856e71 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/Cell.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/Cell.java @@ -16,14 +16,14 @@ */ package org.apache.lucene.spatial.prefix.tree; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.util.BytesRef; /** * Represents a grid cell. Cell instances are generally very transient and may be re-used * internally. To get an instance, you could start with {@link SpatialPrefixTree#getWorldCell()}. - * And from there you could either traverse down the tree with {@link #getNextLevelCells(com.spatial4j.core.shape.Shape)}, + * And from there you could either traverse down the tree with {@link #getNextLevelCells(org.locationtech.spatial4j.shape.Shape)}, * or you could read an indexed term via {@link SpatialPrefixTree#readCell(org.apache.lucene.util.BytesRef,Cell)}. * When a cell is read from a term, it is comprised of just the base bytes plus optionally a leaf flag. 
* diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java index 13281f3134fc..afdde71c0a7c 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java @@ -24,7 +24,7 @@ import java.util.Locale; import java.util.TimeZone; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; /** * A PrefixTree for date ranges in which the levels of the tree occur at natural periods of time (e.g. years, diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/FilterCellIterator.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/FilterCellIterator.java index e4f50e05d67d..ef170071876b 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/FilterCellIterator.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/FilterCellIterator.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial.prefix.tree; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import java.util.Iterator; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/GeohashPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/GeohashPrefixTree.java index fa4e98745ec7..237d26a5f9fa 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/GeohashPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/GeohashPrefixTree.java @@ -20,11 +20,11 @@ import java.util.Collection; import java.util.List; -import 
com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.io.GeohashUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.io.GeohashUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.util.BytesRef; /** diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyCell.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyCell.java index 27c56a763954..d978d3c84811 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyCell.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyCell.java @@ -18,9 +18,9 @@ import java.util.Collection; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyPrefixTree.java index 672c2fe04c73..1a3afcc8740e 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/LegacyPrefixTree.java @@ -18,10 +18,10 @@ import java.util.Arrays; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import 
org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.util.BytesRef; /** The base for the original two SPT's: Geohash and Quad. Don't subclass this for new SPTs. diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/NumberRangePrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/NumberRangePrefixTree.java index 40e80bcc7d72..72b689bc0187 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/NumberRangePrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/NumberRangePrefixTree.java @@ -18,13 +18,13 @@ import java.text.ParseException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; -import com.spatial4j.core.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; @@ -47,7 +47,7 @@ * * * Unlike "normal" spatial components in this module, this special-purpose one only works with {@link Shape}s - * created by the methods on this class, not from any {@link com.spatial4j.core.context.SpatialContext}. + * created by the methods on this class, not from any {@link org.locationtech.spatial4j.context.SpatialContext}. 
* * @see org.apache.lucene.spatial.prefix.NumberRangePrefixTreeStrategy * @see LUCENE-5648 diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java index 6fe2bffea6f8..b86a6d116ff2 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/PackedQuadPrefixTree.java @@ -21,12 +21,12 @@ import java.util.List; import java.util.NoSuchElementException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; -import com.spatial4j.core.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.util.BytesRef; /** diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java index 48dac8714233..3242e7e623f4 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/QuadPrefixTree.java @@ -24,11 +24,11 @@ import java.util.List; import java.util.Locale; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import 
org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.util.BytesRef; /** diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTree.java index 8ead954d66ca..ae2fe83d20e7 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTree.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial.prefix.tree; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.util.BytesRef; /** diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeFactory.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeFactory.java index b74dc93bded6..67480d52ff8f 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeFactory.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeFactory.java @@ -18,8 +18,8 @@ import java.util.Map; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; /** * Abstract Factory for creating {@link SpatialPrefixTree} instances with useful diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/TreeCellIterator.java 
b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/TreeCellIterator.java index 3ec56aca3b80..39c8068d8f9f 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/TreeCellIterator.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/TreeCellIterator.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial.prefix.tree; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; /** * Navigates a {@link org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree} from a given cell (typically the world diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgs.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgs.java index 0503072cd282..37a5503e16af 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgs.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgs.java @@ -16,10 +16,10 @@ */ package org.apache.lucene.spatial.query; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; /** * Principally holds the query {@link Shape} and the {@link SpatialOperation}. @@ -120,8 +120,8 @@ public void setShape(Shape shape) { * inflates the size of the shape but should not shrink it. 
* * @return 0 to 0.5 - * @see #calcDistanceFromErrPct(com.spatial4j.core.shape.Shape, double, - * com.spatial4j.core.context.SpatialContext) + * @see #calcDistanceFromErrPct(org.locationtech.spatial4j.shape.Shape, double, + * org.locationtech.spatial4j.context.SpatialContext) */ public Double getDistErrPct() { return distErrPct; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgsParser.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgsParser.java index 81612fff5858..79ad7a94803e 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgsParser.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialArgsParser.java @@ -16,9 +16,9 @@ */ package org.apache.lucene.spatial.query; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Shape; import java.text.ParseException; import java.util.HashMap; @@ -29,8 +29,8 @@ /** * Parses a string that usually looks like "OPERATION(SHAPE)" into a {@link SpatialArgs} * object. The set of operations supported are defined in {@link SpatialOperation}, such - * as "Intersects" being a common one. The shape portion is defined by WKT {@link com.spatial4j.core.io.WktShapeParser}, - * but it can be overridden/customized via {@link #parseShape(String, com.spatial4j.core.context.SpatialContext)}. + * as "Intersects" being a common one. The shape portion is defined by WKT {@link org.locationtech.spatial4j.io.WktShapeParser}, + * but it can be overridden/customized via {@link #parseShape(String, org.locationtech.spatial4j.context.SpatialContext)}. * There are some optional name-value pair parameters that follow the closing parenthesis. Example: *

      *   Intersects(ENVELOPE(-10,-8,22,20)) distErrPct=0.025
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialOperation.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialOperation.java
    index 7d750ac6ff96..1eeb4bc08d35 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialOperation.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/query/SpatialOperation.java
    @@ -16,9 +16,9 @@
      */
     package org.apache.lucene.spatial.query;
     
    -import com.spatial4j.core.shape.Rectangle;
    -import com.spatial4j.core.shape.Shape;
    -import com.spatial4j.core.shape.SpatialRelation;
    +import org.locationtech.spatial4j.shape.Rectangle;
    +import org.locationtech.spatial4j.shape.Shape;
    +import org.locationtech.spatial4j.shape.SpatialRelation;
     
     import java.io.Serializable;
     import java.util.ArrayList;
    @@ -30,7 +30,7 @@
     /**
      * A predicate that compares a stored geometry to a supplied geometry. It's enum-like. For more
      * explanation of each predicate, consider looking at the source implementation
    - * of {@link #evaluate(com.spatial4j.core.shape.Shape, com.spatial4j.core.shape.Shape)}. It's important
    + * of {@link #evaluate(org.locationtech.spatial4j.shape.Shape, org.locationtech.spatial4j.shape.Shape)}. It's important
      * to be aware that Lucene-spatial makes no distinction of shape boundaries, unlike many standardized
      * definitions. Nor does it make dimensional distinctions (e.g. line vs polygon).
      * You can lookup a predicate by "Covers" or "Contains", for example, and you will get the
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
    index a6c575bc3f58..cf2c329205ef 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/serialized/SerializedDVStrategy.java
    @@ -24,10 +24,10 @@
     import java.io.IOException;
     import java.util.Map;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.io.BinaryCodec;
    -import com.spatial4j.core.shape.Point;
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.io.BinaryCodec;
    +import org.locationtech.spatial4j.shape.Point;
    +import org.locationtech.spatial4j.shape.Shape;
     import org.apache.lucene.document.BinaryDocValuesField;
     import org.apache.lucene.document.Field;
     import org.apache.lucene.index.BinaryDocValues;
    @@ -55,7 +55,7 @@
      * SpatialStrategy that is approximated (like {@link org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy})
      * to add precision or eventually make more specific / advanced calculations on the per-document
      * geometry.
    - * The serialization uses Spatial4j's {@link com.spatial4j.core.io.BinaryCodec}.
    + * The serialization uses Spatial4j's {@link org.locationtech.spatial4j.io.BinaryCodec}.
      *
      * @lucene.experimental
      */
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java
    index 7a3078a0bef1..518fb32b4f17 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java
    @@ -16,13 +16,13 @@
      */
     package org.apache.lucene.spatial.spatial4j;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.distance.DistanceUtils;
    -import com.spatial4j.core.shape.Point;
    -import com.spatial4j.core.shape.Rectangle;
    -import com.spatial4j.core.shape.Shape;
    -import com.spatial4j.core.shape.SpatialRelation;
    -import com.spatial4j.core.shape.impl.RectangleImpl;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.distance.DistanceUtils;
    +import org.locationtech.spatial4j.shape.Point;
    +import org.locationtech.spatial4j.shape.Rectangle;
    +import org.locationtech.spatial4j.shape.Shape;
    +import org.locationtech.spatial4j.shape.SpatialRelation;
    +import org.locationtech.spatial4j.shape.impl.RectangleImpl;
     import org.apache.lucene.geo3d.LatLonBounds;
     import org.apache.lucene.geo3d.GeoArea;
     import org.apache.lucene.geo3d.GeoAreaFactory;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/DistanceToShapeValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/DistanceToShapeValueSource.java
    index 57cad87f97b9..7be24337d865 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/DistanceToShapeValueSource.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/DistanceToShapeValueSource.java
    @@ -29,15 +29,15 @@
     import org.apache.lucene.search.Explanation;
     import org.apache.lucene.search.IndexSearcher;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.distance.DistanceCalculator;
    -import com.spatial4j.core.shape.Point;
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.distance.DistanceCalculator;
    +import org.locationtech.spatial4j.shape.Point;
    +import org.locationtech.spatial4j.shape.Shape;
     
     /**
      * The distance from a provided Point to a Point retrieved from a ValueSource via
      * {@link org.apache.lucene.queries.function.FunctionValues#objectVal(int)}. The distance
    - * is calculated via a {@link com.spatial4j.core.distance.DistanceCalculator}.
    + * is calculated via a {@link org.locationtech.spatial4j.distance.DistanceCalculator}.
      *
      * @lucene.experimental
      */
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
    index dd391d1fc963..257dc6778d23 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
    @@ -22,8 +22,8 @@
     import java.util.List;
     import java.util.Map;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.shape.Shape;
     
     import org.apache.lucene.index.LeafReaderContext;
     import org.apache.lucene.queries.function.FunctionValues;
    @@ -36,7 +36,7 @@
      * The area of a Shape retrieved from a ValueSource via
      * {@link org.apache.lucene.queries.function.FunctionValues#objectVal(int)}.
      *
    - * @see Shape#getArea(com.spatial4j.core.context.SpatialContext)
    + * @see Shape#getArea(org.locationtech.spatial4j.context.SpatialContext)
      *
      * @lucene.experimental
      */
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCache.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCache.java
    index 480369bcd82f..e24cd6832107 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCache.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCache.java
    @@ -16,7 +16,7 @@
      */
     package org.apache.lucene.spatial.util;
     
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.shape.Shape;
     
     import java.util.ArrayList;
     import java.util.List;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheDistanceValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheDistanceValueSource.java
    index e4cb1463672c..1ac84e8785c8 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheDistanceValueSource.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheDistanceValueSource.java
    @@ -16,9 +16,9 @@
      */
     package org.apache.lucene.spatial.util;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.distance.DistanceCalculator;
    -import com.spatial4j.core.shape.Point;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.distance.DistanceCalculator;
    +import org.locationtech.spatial4j.shape.Point;
     import org.apache.lucene.index.LeafReaderContext;
     import org.apache.lucene.queries.function.FunctionValues;
     import org.apache.lucene.queries.function.ValueSource;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheProvider.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheProvider.java
    index 04c52f79a67f..bca73ccf4dc1 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheProvider.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapeFieldCacheProvider.java
    @@ -16,7 +16,7 @@
      */
     package org.apache.lucene.spatial.util;
     
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.shape.Shape;
     import org.apache.lucene.index.*;
     import org.apache.lucene.search.DocIdSetIterator;
     import org.apache.lucene.util.BytesRef;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapePredicateValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapePredicateValueSource.java
    index b1dfaaaf5698..08c1e43fbcc5 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapePredicateValueSource.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/ShapePredicateValueSource.java
    @@ -16,7 +16,7 @@
      */
     package org.apache.lucene.spatial.util;
     
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.shape.Shape;
     
     import org.apache.lucene.index.LeafReaderContext;
     import org.apache.lucene.queries.function.FunctionValues;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/DistanceValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/DistanceValueSource.java
    index d31fd594466e..7cab3fe0a075 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/DistanceValueSource.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/DistanceValueSource.java
    @@ -16,8 +16,8 @@
      */
     package org.apache.lucene.spatial.vector;
     
    -import com.spatial4j.core.distance.DistanceCalculator;
    -import com.spatial4j.core.shape.Point;
    +import org.locationtech.spatial4j.distance.DistanceCalculator;
    +import org.locationtech.spatial4j.shape.Point;
     import org.apache.lucene.index.LeafReader;
     import org.apache.lucene.index.LeafReaderContext;
     import org.apache.lucene.index.DocValues;
    diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
    index f572f82f3ca2..f5f5f34121f7 100644
    --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
    +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
    @@ -16,11 +16,11 @@
      */
     package org.apache.lucene.spatial.vector;
     
    -import com.spatial4j.core.context.SpatialContext;
    -import com.spatial4j.core.shape.Circle;
    -import com.spatial4j.core.shape.Point;
    -import com.spatial4j.core.shape.Rectangle;
    -import com.spatial4j.core.shape.Shape;
    +import org.locationtech.spatial4j.context.SpatialContext;
    +import org.locationtech.spatial4j.shape.Circle;
    +import org.locationtech.spatial4j.shape.Point;
    +import org.locationtech.spatial4j.shape.Rectangle;
    +import org.locationtech.spatial4j.shape.Shape;
     import org.apache.lucene.document.LegacyDoubleField;
     import org.apache.lucene.document.Field;
     import org.apache.lucene.document.FieldType;
    @@ -50,7 +50,7 @@
      * org.apache.lucene.spatial.query.SpatialOperation#Intersects} and {@link
      * SpatialOperation#IsWithin} is supported.
  • *
  • Uses the FieldCache for - * {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point)} and for + * {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point)} and for * searching with a Circle.
  • * * @@ -60,7 +60,7 @@ * This is a simple Strategy. Search works with {@link org.apache.lucene.search.LegacyNumericRangeQuery}s on * an x and y pair of fields. A Circle query does the same bbox query but adds a * ValueSource filter on - * {@link #makeDistanceValueSource(com.spatial4j.core.shape.Point)}. + * {@link #makeDistanceValueSource(org.locationtech.spatial4j.shape.Point)}. *

    * One performance shortcoming with this strategy is that a scenario involving * both a search using a Circle and sort will result in calculations for the @@ -106,7 +106,7 @@ public Field[] createIndexableFields(Shape shape) { throw new UnsupportedOperationException("Can only index Point, not " + shape); } - /** @see #createIndexableFields(com.spatial4j.core.shape.Shape) */ + /** @see #createIndexableFields(org.locationtech.spatial4j.shape.Shape) */ public Field[] createIndexableFields(Point point) { FieldType doubleFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED); doubleFieldType.setNumericPrecisionStep(precisionStep); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java index 9a29677c6e73..160267905436 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/DistanceStrategyTest.java @@ -22,9 +22,9 @@ import java.util.List; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.spatial.bbox.BBoxStrategy; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java index 8506c868ddee..a081497c387a 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/PortedSolr3Test.java @@ -22,10 +22,10 @@ import 
java.util.Set; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java index b1a5e542fc50..5dbb8f837e85 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/QueryEqualsHashCodeTest.java @@ -19,8 +19,8 @@ import java.util.ArrayList; import java.util.Collection; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.bbox.BBoxStrategy; import org.apache.lucene.spatial.composite.CompositeSpatialStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialArgsTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialArgsTest.java index 09b5d4666266..094953a95db1 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialArgsTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialArgsTest.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial; -import 
com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.query.SpatialArgs; import org.junit.Test; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java index 1bd715969c05..76e0200f26b3 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java @@ -18,10 +18,10 @@ import java.io.IOException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java index 94e5a8e68a72..529e98bc5587 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java @@ -24,10 +24,10 @@ import java.util.Random; import java.util.logging.Logger; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import 
org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestData.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestData.java index 27d47b333646..06a68ce72e97 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestData.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestData.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import java.io.BufferedReader; import java.io.IOException; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestQuery.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestQuery.java index bac90cf45ca1..47e9c120eecb 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestQuery.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestQuery.java @@ -16,7 +16,7 @@ */ package org.apache.lucene.spatial; -import com.spatial4j.core.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContext; import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialArgsParser; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java index 00e437b857d6..df37d18d0e00 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java @@ -29,8 +29,8 @@ import java.util.Set; import java.util.logging.Logger; -import 
com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/TestTestFramework.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/TestTestFramework.java index d31fdf635bfb..6af7467c2caf 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/TestTestFramework.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/TestTestFramework.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.spatial.query.SpatialArgsParser; import org.apache.lucene.spatial.query.SpatialOperation; import org.apache.lucene.util.LuceneTestCase; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java index 6140996637e9..20a7202ba331 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java @@ -19,12 +19,12 @@ import java.io.IOException; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.impl.RectangleImpl; +import 
org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/composite/CompositeStrategyTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/composite/CompositeStrategyTest.java index 8e1bb513c46f..7d49e8b2e4cb 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/composite/CompositeStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/composite/CompositeStrategyTest.java @@ -19,12 +19,12 @@ import java.io.IOException; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.spatial.prefix.RandomSpatialOpStrategyTestCase; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java 
b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java index 7cd4723d5b6c..33c8a330af95 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/DateNRStrategyTest.java @@ -20,7 +20,7 @@ import java.util.Calendar; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.prefix.tree.DateRangePrefixTree; import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape; import org.apache.lucene.spatial.query.SpatialOperation; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java index 124af79041b3..2de18cc32256 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java @@ -21,15 +21,15 @@ import java.util.List; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; -import com.spatial4j.core.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import 
org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.spatial.StrategyTestCase; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java index 09fb3a9fd45d..3f1684f58b5a 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/JtsPolygonTest.java @@ -16,9 +16,9 @@ */ package org.apache.lucene.spatial.prefix; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; @@ -45,7 +45,7 @@ public JtsPolygonTest() { try { HashMap args = new HashMap<>(); args.put("spatialContextFactory", - "com.spatial4j.core.context.jts.JtsSpatialContextFactory"); + "org.locationtech.spatial4j.context.jts.JtsSpatialContextFactory"); ctx = SpatialContextFactory.makeSpatialContext(args, getClass().getClassLoader()); } catch (NoClassDefFoundError e) { assumeTrue("This test requires JTS jar: "+e, false); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java index 11e1d18701cf..514c18e078be 100644 --- 
a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java @@ -23,7 +23,7 @@ import java.util.List; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java index 8db131c84a6f..c7e107cfba17 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java @@ -29,14 +29,14 @@ import java.util.Set; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.ShapeCollection; -import com.spatial4j.core.shape.SpatialRelation; -import com.spatial4j.core.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; import org.apache.lucene.document.Document; import 
org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; @@ -56,10 +56,10 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; -import static com.spatial4j.core.shape.SpatialRelation.CONTAINS; -import static com.spatial4j.core.shape.SpatialRelation.DISJOINT; -import static com.spatial4j.core.shape.SpatialRelation.INTERSECTS; -import static com.spatial4j.core.shape.SpatialRelation.WITHIN; +import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS; +import static org.locationtech.spatial4j.shape.SpatialRelation.DISJOINT; +import static org.locationtech.spatial4j.shape.SpatialRelation.INTERSECTS; +import static org.locationtech.spatial4j.shape.SpatialRelation.WITHIN; /** Randomized PrefixTree test that considers the fuzziness of the * results introduced by grid approximation. 
*/ diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java index 87f1071503c8..22c58393f5f8 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpStrategyTestCase.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Set; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.StrategyTestCase; import org.apache.lucene.spatial.query.SpatialArgs; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java index a53d52dee7ad..f852464ca198 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java @@ -16,10 +16,10 @@ */ package org.apache.lucene.spatial.prefix; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.SpatialMatchConcern; import org.apache.lucene.spatial.StrategyTestCase; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java 
b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java index 1a912c046203..fc131c5d7884 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial.prefix; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTreeTest.java index 74a989efb249..12e9744064b1 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTreeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTreeTest.java @@ -21,8 +21,8 @@ import java.util.Calendar; import java.util.GregorianCalendar; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeTest.java index 403b8d196374..8a3d79c3b291 100644 --- 
a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/SpatialPrefixTreeTest.java @@ -16,10 +16,10 @@ */ package org.apache.lucene.spatial.prefix.tree; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/query/SpatialArgsParserTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/query/SpatialArgsParserTest.java index 93b95f3d5244..9724c6519b22 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/query/SpatialArgsParserTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/query/SpatialArgsParserTest.java @@ -16,8 +16,8 @@ */ package org.apache.lucene.spatial.query; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.util.LuceneTestCase; import org.junit.Test; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/serialized/SerializedStrategyTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/serialized/SerializedStrategyTest.java index bed833939400..6a73d23d2f28 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/serialized/SerializedStrategyTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/serialized/SerializedStrategyTest.java 
@@ -18,7 +18,7 @@ import java.io.IOException; -import com.spatial4j.core.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContext; import org.apache.lucene.spatial.SpatialMatchConcern; import org.apache.lucene.spatial.StrategyTestCase; import org.junit.Before; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java index 8040a351ce21..d26bb29a5979 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java @@ -21,10 +21,10 @@ import java.util.List; import com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.composite.CompositeSpatialStrategy; import org.apache.lucene.spatial.prefix.RandomSpatialOpStrategyTestCase; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; @@ -41,7 +41,7 @@ import org.apache.lucene.geo3d.PlanetModel; import org.junit.Test; -import static com.spatial4j.core.distance.DistanceUtils.DEGREES_TO_RADIANS; +import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS; public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase { diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java index 58b520db81cf..134b8c753666 100644 --- 
a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java @@ -19,12 +19,12 @@ import java.util.ArrayList; import java.util.List; -import com.spatial4j.core.TestLog; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.RectIntersectionTestHelper; +import org.locationtech.spatial4j.TestLog; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.RectIntersectionTestHelper; import org.apache.lucene.geo3d.LatLonBounds; import org.apache.lucene.geo3d.GeoBBox; import org.apache.lucene.geo3d.GeoBBoxFactory; @@ -37,7 +37,7 @@ import org.junit.Rule; import org.junit.Test; -import static com.spatial4j.core.distance.DistanceUtils.DEGREES_TO_RADIANS; +import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS; public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTestCase { protected final static double RADIANS_PER_DEGREE = Math.PI/180.0; @@ -110,7 +110,6 @@ protected int getBoundingMinimum(int laps) { } } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6867") @Test public void testGeoCircleRect() { new Geo3dRectIntersectionTestHelper(ctx) { @@ -133,7 +132,6 @@ protected Point randomPointInEmptyShape(Geo3dShape shape) { }.testRelateWithRectangle(); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6867") @Test public void testGeoBBoxRect() { new Geo3dRectIntersectionTestHelper(ctx) { @@ -168,7 +166,6 @@ protected Point randomPointInEmptyShape(Geo3dShape shape) { 
}.testRelateWithRectangle(); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6867") @Test public void testGeoPolygonRect() { new Geo3dRectIntersectionTestHelper(ctx) { @@ -212,7 +209,6 @@ protected int getWithinMinimum(int laps) { }.testRelateWithRectangle(); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6867") @Test public void testGeoPathRect() { new Geo3dRectIntersectionTestHelper(ctx) { diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java index aac0a0a04148..2d958231e6d2 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.List; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.geo3d.GeoArea; import org.apache.lucene.geo3d.GeoBBox; import org.apache.lucene.geo3d.GeoBBoxFactory; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java index 40d1b248a702..0c18f5d7ddea 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/RandomizedShapeTestCase.java @@ -16,17 +16,17 @@ */ package org.apache.lucene.spatial.spatial4j; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import 
com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.SpatialRelation; -import com.spatial4j.core.shape.impl.Range; - -import static com.spatial4j.core.shape.SpatialRelation.CONTAINS; -import static com.spatial4j.core.shape.SpatialRelation.WITHIN; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.SpatialRelation; +import org.locationtech.spatial4j.shape.impl.Range; + +import static org.locationtech.spatial4j.shape.SpatialRelation.CONTAINS; +import static org.locationtech.spatial4j.shape.SpatialRelation.WITHIN; import org.apache.lucene.util.LuceneTestCase; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java index 1d559da5835a..444647404e5c 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java @@ -21,7 +21,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.junit.Test; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomFloat; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java index d62a0a8231fb..69f8c4da7abf 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java +++ 
b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java @@ -16,9 +16,9 @@ */ package org.apache.lucene.spatial.vector; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Circle; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.SpatialMatchConcern; import org.apache.lucene.spatial.StrategyTestCase; diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml index 2936c5bb339e..5f8706f0e43f 100644 --- a/solr/core/ivy.xml +++ b/solr/core/ivy.xml @@ -36,7 +36,7 @@ - + diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java index dc1b9afd54d6..4ad882c8e99f 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java +++ b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java @@ -32,8 +32,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.spatial.prefix.HeatmapFacetCounter; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.query.SpatialArgs; diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java index 83fd44775f41..222f0b83eb69 100644 --- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java @@ -57,12 +57,12 @@ import com.google.common.base.Throwables; 
import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.context.SpatialContextFactory; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.context.SpatialContextFactory; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; /** * Abstract base class for Solr FieldTypes based on a Lucene 4 {@link SpatialStrategy}. diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java index 164398b48dd1..abf55f1bb625 100644 --- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java +++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java @@ -31,7 +31,7 @@ import org.apache.lucene.spatial.query.SpatialArgsParser; import org.apache.solr.util.MapListener; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/solr/core/src/java/org/apache/solr/schema/BBoxField.java b/solr/core/src/java/org/apache/solr/schema/BBoxField.java index 2f282c85549e..f655e68ce068 100644 --- a/solr/core/src/java/org/apache/solr/schema/BBoxField.java +++ b/solr/core/src/java/org/apache/solr/schema/BBoxField.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Map; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.index.DocValuesType; import 
org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.spatial.bbox.BBoxOverlapRatioValueSource; diff --git a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java index 38b68005ce05..95b441a9ed1d 100644 --- a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java +++ b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Map; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.shape.Shape; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; diff --git a/solr/core/src/java/org/apache/solr/schema/GeoHashField.java b/solr/core/src/java/org/apache/solr/schema/GeoHashField.java index 7deae5f9ddeb..2baf72e192b6 100644 --- a/solr/core/src/java/org/apache/solr/schema/GeoHashField.java +++ b/solr/core/src/java/org/apache/solr/schema/GeoHashField.java @@ -18,10 +18,10 @@ import java.io.IOException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.io.GeohashUtils; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.io.GeohashUtils; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.LiteralValueSource; diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java index 8592c5aa0583..c30729adb48f 100644 --- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java +++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java @@ -47,10 +47,10 @@ import org.apache.solr.search.SpatialOptions; import 
org.apache.solr.util.SpatialUtils; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; /** diff --git a/solr/core/src/java/org/apache/solr/schema/PointType.java b/solr/core/src/java/org/apache/solr/schema/PointType.java index 934523189014..b2f15ab533e4 100644 --- a/solr/core/src/java/org/apache/solr/schema/PointType.java +++ b/solr/core/src/java/org/apache/solr/schema/PointType.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.Map; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.ValueSource; diff --git a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java index fe4cedc04c58..b633174f9100 100644 --- a/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java +++ b/solr/core/src/java/org/apache/solr/schema/RptWithGeometrySpatialField.java @@ -20,9 +20,9 @@ import java.lang.ref.WeakReference; import java.util.Map; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.shape.Shape; -import com.spatial4j.core.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; diff --git 
a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java index 2c05f33c3e8f..0830267652aa 100644 --- a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java @@ -16,7 +16,7 @@ */ package org.apache.solr.search; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java index 1f4ea34ff955..956550c3ac3d 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java @@ -20,9 +20,9 @@ import java.util.Collections; import java.util.List; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.ConstNumberSource; import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource; @@ -41,7 +41,7 @@ /** * Parses "geodist" creating {@link HaversineConstFunction} or {@link HaversineFunction} - * or calling {@link SpatialStrategy#makeDistanceValueSource(com.spatial4j.core.shape.Point,double)}. + * or calling {@link SpatialStrategy#makeDistanceValueSource(org.locationtech.spatial4j.shape.Point,double)}. 
*/ public class GeoDistValueSourceParser extends ValueSourceParser { diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java index c4a7bd55705b..b00f2fd861a0 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java @@ -18,7 +18,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; -import com.spatial4j.core.io.GeohashUtils; +import org.locationtech.spatial4j.io.GeohashUtils; import java.util.Map; import java.io.IOException; diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java index 4dd49e481219..915db3336786 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ package org.apache.solr.search.function.distance; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.distance.GeodesicSphereDistCalc; -import com.spatial4j.core.io.GeohashUtils; -import com.spatial4j.core.shape.Point; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.GeodesicSphereDistCalc; +import org.locationtech.spatial4j.io.GeohashUtils; +import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java index f9ac062dda3b..4b68f5cec706 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ package org.apache.solr.search.function.distance; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; @@ -26,7 +26,7 @@ import java.io.IOException; import java.util.Map; -import static com.spatial4j.core.distance.DistanceUtils.DEGREES_TO_RADIANS; +import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS; /** * Haversine function with one point constant diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java index c0eb0442ed24..20ed5a733bbd 100644 --- a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java +++ b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java @@ -21,7 +21,7 @@ import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.apache.lucene.queries.function.valuesource.MultiValueSource; import org.apache.lucene.search.IndexSearcher; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.solr.common.SolrException; import java.io.IOException; diff --git a/solr/core/src/java/org/apache/solr/util/DistanceUnits.java b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java index 63f43e4652fb..a2163ac7430d 100644 --- a/solr/core/src/java/org/apache/solr/util/DistanceUnits.java +++ b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java @@ -21,7 +21,7 @@ import java.util.Set; import com.google.common.collect.ImmutableMap; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.solr.schema.AbstractSpatialFieldType; /** diff --git 
a/solr/core/src/java/org/apache/solr/util/SpatialUtils.java b/solr/core/src/java/org/apache/solr/util/SpatialUtils.java index 1b05d01f5e91..9f6019cff338 100644 --- a/solr/core/src/java/org/apache/solr/util/SpatialUtils.java +++ b/solr/core/src/java/org/apache/solr/util/SpatialUtils.java @@ -18,11 +18,11 @@ import java.text.ParseException; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.exception.InvalidShapeException; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; -import com.spatial4j.core.shape.Shape; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; import org.apache.solr.common.SolrException; /** Utility methods pertaining to spatial. */ @@ -32,7 +32,7 @@ private SpatialUtils() {} /** * Parses a 'geom' parameter (might also be used to parse shapes for indexing). {@code geomStr} can either be WKT or - * a rectangle-range syntax (see {@link #parseRectangle(String, com.spatial4j.core.context.SpatialContext)}. + * a rectangle-range syntax (see {@link #parseRectangle(String, org.locationtech.spatial4j.context.SpatialContext)}. */ public static Shape parseGeomSolrException(String geomStr, SpatialContext ctx) { if (geomStr.length() == 0) { @@ -100,7 +100,7 @@ private static int findIndexNotSpace(String str, int startIdx, int inc) { return idx; } - /** Calls {@link #parsePoint(String, com.spatial4j.core.context.SpatialContext)} and wraps + /** Calls {@link #parsePoint(String, org.locationtech.spatial4j.context.SpatialContext)} and wraps * the exception with {@link org.apache.solr.common.SolrException} with a helpful message. 
*/ public static Point parsePointSolrException(String externalVal, SpatialContext ctx) throws SolrException { try { @@ -116,7 +116,7 @@ public static Point parsePointSolrException(String externalVal, SpatialContext c /** * Parses {@code str} in the format of '[minPoint TO maxPoint]' where {@code minPoint} is the lower left corner * and maxPoint is the upper-right corner of the bounding box. Both corners may optionally be wrapped with a quote - * and then it's parsed via {@link #parsePoint(String, com.spatial4j.core.context.SpatialContext)}. + * and then it's parsed via {@link #parsePoint(String, org.locationtech.spatial4j.context.SpatialContext)}. * @param str Non-null; may *not* have leading or trailing spaces * @param ctx Non-null * @return the Rectangle @@ -140,7 +140,7 @@ public static Rectangle parseRectangle(String str, SpatialContext ctx) throws In } /** - * Calls {@link #parseRectangle(String, com.spatial4j.core.context.SpatialContext)} and wraps the exception with + * Calls {@link #parseRectangle(String, org.locationtech.spatial4j.context.SpatialContext)} and wraps the exception with * {@link org.apache.solr.common.SolrException} with a helpful message. 
*/ public static Rectangle parseRectangeSolrException(String externalVal, SpatialContext ctx) throws SolrException { diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java index 20555c6fa506..2fe37409520f 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java @@ -21,10 +21,10 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import com.spatial4j.core.context.SpatialContext; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.shape.Point; -import com.spatial4j.core.shape.Rectangle; +import org.locationtech.spatial4j.context.SpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; import org.apache.lucene.spatial.bbox.BBoxStrategy; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; diff --git a/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java b/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java index f33bc2a686fc..b3ad183a596d 100644 --- a/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java +++ b/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java @@ -15,8 +15,8 @@ * limitations under the License. 
*/ package org.apache.solr.search.function.distance; -import com.spatial4j.core.distance.DistanceUtils; -import com.spatial4j.core.io.GeohashUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.io.GeohashUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.BeforeClass; diff --git a/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java b/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java index f16292cc3c50..395637166a09 100644 --- a/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java +++ b/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java @@ -16,7 +16,7 @@ */ package org.apache.solr.util; -import com.spatial4j.core.distance.DistanceUtils; +import org.locationtech.spatial4j.distance.DistanceUtils; import org.apache.lucene.util.LuceneTestCase; public class DistanceUnitsTest extends LuceneTestCase { diff --git a/solr/licenses/spatial4j-0.5.jar.sha1 b/solr/licenses/spatial4j-0.5.jar.sha1 deleted file mode 100644 index c81a76cbe91b..000000000000 --- a/solr/licenses/spatial4j-0.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e16edaf6b1ba76db7f08c2f3723fce3b358ecc3 diff --git a/solr/licenses/spatial4j-0.6.jar.sha1 b/solr/licenses/spatial4j-0.6.jar.sha1 new file mode 100644 index 000000000000..56c02ad3016c --- /dev/null +++ b/solr/licenses/spatial4j-0.6.jar.sha1 @@ -0,0 +1 @@ +21b15310bddcfd8c72611c180f20cf23279809a3 diff --git a/solr/licenses/spatial4j-NOTICE.txt b/solr/licenses/spatial4j-NOTICE.txt index d7d48d185c69..779b8df7e911 100644 --- a/solr/licenses/spatial4j-NOTICE.txt +++ b/solr/licenses/spatial4j-NOTICE.txt @@ -1,5 +1,133 @@ -Spatial4j -Copyright 2012-2014 The Apache Software Foundation +# about.md file -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). 
+## About This Content + +May 22, 2015 + +### License + +The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the +Content is provided to you under the terms and conditions of the Apache License, Version 2.0. A copy of the Apache +License, Version 2.0 is available at +[http://www.apache.org/licenses/LICENSE-2.0.txt](http://www.apache.org/licenses/LICENSE-2.0.txt) + +If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another +party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content. +Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the +Redistributor. Unless otherwise indicated below, the terms and conditions of the Apache License, Version 2.0 still apply +to any source code in the Content and such source code may be obtained at +[http://www.eclipse.org](http://www.eclipse.org). + +# notice.md file + +Note: the below Eclipse user agreement is standard. It says "Unless otherwise indicated, "... before referring to the +EPL. We indicate above that all content is licensed under the ASLv2 license. -- David Smiley + +## Eclipse Foundation Software User Agreement + +April 9, 2014 + +### Usage Of Content + +THE ECLIPSE FOUNDATION MAKES AVAILABLE SOFTWARE, DOCUMENTATION, INFORMATION AND/OR OTHER MATERIALS FOR OPEN SOURCE +PROJECTS (COLLECTIVELY "CONTENT"). USE OF THE CONTENT IS GOVERNED BY THE TERMS AND CONDITIONS OF THIS AGREEMENT AND/OR +THE TERMS AND CONDITIONS OF LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. BY USING THE CONTENT, YOU AGREE +THAT YOUR USE OF THE CONTENT IS GOVERNED BY THIS AGREEMENT AND/OR THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE +AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW. 
IF YOU DO NOT AGREE TO THE TERMS AND CONDITIONS OF THIS AGREEMENT +AND THE TERMS AND CONDITIONS OF ANY APPLICABLE LICENSE AGREEMENTS OR NOTICES INDICATED OR REFERENCED BELOW, THEN YOU MAY +NOT USE THE CONTENT. + +### Applicable Licenses + +Unless otherwise indicated, all Content made available by the Eclipse Foundation is provided to you under the terms and +conditions of the Eclipse Public License Version 1.0 ("EPL"). A copy of the EPL is provided with this Content and is +also available at [http://www.eclipse.org/legal/epl-v10.html](http://www.eclipse.org/legal/epl-v10.html). For purposes +of the EPL, "Program" will mean the Content. + +Content includes, but is not limited to, source code, object code, documentation and other files maintained in the +Eclipse Foundation source code repository ("Repository") in software modules ("Modules") and made available as +downloadable archives ("Downloads"). + +* Content may be structured and packaged into modules to facilitate delivering, extending, and upgrading the Content. + Typical modules may include plug-ins ("Plug-ins"), plug-in fragments ("Fragments"), and features ("Features"). +* Each Plug-in or Fragment may be packaged as a sub-directory or JAR (Java™ ARchive) in a directory named "plugins". +* A Feature is a bundle of one or more Plug-ins and/or Fragments and associated material. Each Feature may be packaged + as a sub-directory in a directory named "features". Within a Feature, files named "feature.xml" may contain a list + of the names and version numbers of the Plug-ins and/or Fragments associated with that Feature. +* Features may also include other Features ("Included Features"). Within a Feature, files named "feature.xml" may + contain a list of the names and version numbers of Included Features. + +The terms and conditions governing Plug-ins and Fragments should be contained in files named "about.html" ("Abouts"). 
+The terms and conditions governing Features and Included Features should be contained in files named "license.html" +("Feature Licenses"). Abouts and Feature Licenses may be located in any directory of a Download or Module including, but +not limited to the following locations: + +* The top-level (root) directory +* Plug-in and Fragment directories +* Inside Plug-ins and Fragments packaged as JARs +* Sub-directories of the directory named "src" of certain Plug-ins +* Feature directories + +Note: if a Feature made available by the Eclipse Foundation is installed using the Provisioning Technology (as defined +below), you must agree to a license ("Feature Update License") during the installation process. If the Feature contains +Included Features, the Feature Update License should either provide you with the terms and conditions governing the +Included Features or inform you where you can locate them. Feature Update Licenses may be found in the "license" +property of files named "feature.properties" found within a Feature. Such Abouts, Feature Licenses, and Feature Update +Licenses contain the terms and conditions (or references to such terms and conditions) that govern your use of the +associated Content in that directory. + +THE ABOUTS, FEATURE LICENSES, AND FEATURE UPDATE LICENSES MAY REFER TO THE EPL OR OTHER LICENSE AGREEMENTS, NOTICES OR +TERMS AND CONDITIONS. 
SOME OF THESE OTHER LICENSE AGREEMENTS MAY INCLUDE (BUT ARE NOT LIMITED TO): + +* Eclipse Distribution License Version 1.0 (available at + [http://www.eclipse.org/licenses/edl-v1.0.html](http://www.eclipse.org/licenses/edl-v10.html)) +* Common Public License Version 1.0 (available at + [http://www.eclipse.org/legal/cpl-v10.html](http://www.eclipse.org/legal/cpl-v10.html)) +* Apache Software License 1.1 (available at + [http://www.apache.org/licenses/LICENSE](http://www.apache.org/licenses/LICENSE)) +* Apache Software License 2.0 (available at + [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)) +* Mozilla Public License Version 1.1 (available at + [http://www.mozilla.org/MPL/MPL-1.1.html](http://www.mozilla.org/MPL/MPL-1.1.html)) + +IT IS YOUR OBLIGATION TO READ AND ACCEPT ALL SUCH TERMS AND CONDITIONS PRIOR TO USE OF THE CONTENT. If no About, Feature +License, or Feature Update License is provided, please contact the Eclipse Foundation to determine what terms and +conditions govern that particular Content. + +### Use of Provisioning Technology + +The Eclipse Foundation makes available provisioning software, examples of which include, but are not limited to, p2 and +the Eclipse Update Manager ("Provisioning Technology") for the purpose of allowing users to install software, +documentation, information and/or other materials (collectively "Installable Software"). This capability is provided +with the intent of allowing such users to install, extend and update Eclipse-based products. Information about packaging +Installable Software is available at +[http://eclipse.org/equinox/p2/repository_packaging.html](http://eclipse.org/equinox/p2/repository_packaging.html) +("Specification"). + +You may use Provisioning Technology to allow other parties to install Installable Software. 
You shall be responsible for +enabling the applicable license agreements relating to the Installable Software to be presented to, and accepted by, the +users of the Provisioning Technology in accordance with the Specification. By using Provisioning Technology in such a +manner and making it available in accordance with the Specification, you further acknowledge your agreement to, and the +acquisition of all necessary rights to permit the following: + +1. A series of actions may occur ("Provisioning Process") in which a user may execute the Provisioning Technology on a + machine ("Target Machine") with the intent of installing, extending or updating the functionality of an + Eclipse-based product. +2. During the Provisioning Process, the Provisioning Technology may cause third party Installable Software or a portion + thereof to be accessed and copied to the Target Machine. +3. Pursuant to the Specification, you will provide to the user the terms and conditions that govern the use of the + Installable Software ("Installable Software Agreement") and such Installable Software Agreement shall be accessed + from the Target Machine in accordance with the Specification. Such Installable Software Agreement must inform the + user of the terms and conditions that govern the Installable Software and must solicit acceptance by the end user in + the manner prescribed in such Installable Software Agreement. Upon such indication of agreement by the user, the + provisioning Technology will complete installation of the Installable Software. + +### Cryptography + +Content may contain encryption software. The country in which you are currently may have restrictions on the import, +possession, and use, and/or re-export to another country, of encryption software. BEFORE using any encryption software, +please check the country's laws, regulations and policies concerning the import, possession, or use, and re-export of +encryption software, to see if this is permitted. 
+ +Java and all Java-based trademarks are trademarks of Oracle Corporation in the United States, other countries, +or both. \ No newline at end of file From 6dcb01ccb4ec77a1cf2fa89155335ead689535c1 Mon Sep 17 00:00:00 2001 From: David Smiley Date: Wed, 2 Mar 2016 23:03:44 -0500 Subject: [PATCH 0008/1113] SOLR-8764: test schema-latest.xml spatial dist units should be kilometers (no test uses yet?) (cherry picked from commit deb6a49) --- .../core/src/test-files/solr/collection1/conf/schema_latest.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml index 803d45eed770..ef82aa189b70 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml @@ -718,7 +718,7 @@ http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4 --> + geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" /> - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - 6.0.0 + 6.1.0 - diff --git a/solr/build.xml b/solr/build.xml index 1ab1a9279862..218bf8ce6d39 100644 --- a/solr/build.xml +++ b/solr/build.xml @@ -444,7 +444,7 @@ - From 267e326dbf137de5357e0aca7418f648752cb22a Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Mon, 7 Mar 2016 05:22:40 -0500 Subject: [PATCH 0047/1113] LUCENE-7072: always use WGS84 planet model in Geo3DPoint --- lucene/CHANGES.txt | 3 ++ .../apache/lucene/geo3d/BasePlanetObject.java | 5 +++ .../org/apache/lucene/geo3d/Geo3DPoint.java | 43 ++++++++----------- .../lucene/geo3d/PointInGeo3DShapeQuery.java | 30 ++++++------- .../apache/lucene/geo3d/TestGeo3DPoint.java | 25 +++++------ 5 files changed, 52 insertions(+), 54 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index c23a3fe08a29..3647d5d191cb 100644 --- 
a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -118,6 +118,9 @@ API Changes * LUCENE-7064: MultiPhraseQuery is now immutable and should be constructed with MultiPhraseQuery.Builder. (Luc Vanlerberghe via Adrien Grand) +* LUCENE-7072: Geo3DPoint always uses WGS84 planet model. + (Robert Muir, Mike McCandless) + Optimizations * LUCENE-6891: Use prefix coding when writing points in diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java index b5e3d286adbe..c64b974fd1fb 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java @@ -34,6 +34,11 @@ public abstract class BasePlanetObject { public BasePlanetObject(final PlanetModel planetModel) { this.planetModel = planetModel; } + + /** Returns the {@link PlanetModel} provided when this shape was created. */ + public PlanetModel getPlanetModel() { + return planetModel; + } @Override public int hashCode() { diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java index fbdb00d7a3c1..cde87f3c77ba 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java @@ -36,8 +36,6 @@ * @lucene.experimental */ public final class Geo3DPoint extends Field { - private final PlanetModel planetModel; - /** Indexing {@link FieldType}. */ public static final FieldType TYPE = new FieldType(); static { @@ -46,16 +44,15 @@ public final class Geo3DPoint extends Field { } /** - * Creates a new Geo3DPoint field with the specified lat, lon (in radians), given a planet model. + * Creates a new Geo3DPoint field with the specified lat, lon (in radians). 
* * @throws IllegalArgumentException if the field name is null or lat or lon are out of bounds */ - public Geo3DPoint(String name, PlanetModel planetModel, double lat, double lon) { + public Geo3DPoint(String name, double lat, double lon) { super(name, TYPE); - this.planetModel = planetModel; // Translate lat/lon to x,y,z: - final GeoPoint point = new GeoPoint(planetModel, lat, lon); - fillFieldsData(planetModel, point.x, point.y, point.z); + final GeoPoint point = new GeoPoint(PlanetModel.WGS84, lat, lon); + fillFieldsData(point.x, point.y, point.z); } /** @@ -63,40 +60,38 @@ public Geo3DPoint(String name, PlanetModel planetModel, double lat, double lon) * * @throws IllegalArgumentException if the field name is null or lat or lon are out of bounds */ - public Geo3DPoint(String name, PlanetModel planetModel, double x, double y, double z) { + public Geo3DPoint(String name, double x, double y, double z) { super(name, TYPE); - this.planetModel = planetModel; - fillFieldsData(planetModel, x, y, z); + fillFieldsData(x, y, z); } - private void fillFieldsData(PlanetModel planetModel, double x, double y, double z) { + private void fillFieldsData(double x, double y, double z) { byte[] bytes = new byte[12]; - encodeDimension(planetModel, x, bytes, 0); - encodeDimension(planetModel, y, bytes, Integer.BYTES); - encodeDimension(planetModel, z, bytes, 2*Integer.BYTES); + encodeDimension(x, bytes, 0); + encodeDimension(y, bytes, Integer.BYTES); + encodeDimension(z, bytes, 2*Integer.BYTES); fieldsData = new BytesRef(bytes); } // public helper methods (e.g. 
for queries) /** Encode single dimension */ - public static void encodeDimension(PlanetModel planetModel, double value, byte bytes[], int offset) { - NumericUtils.intToSortableBytes(Geo3DUtil.encodeValue(planetModel.getMaximumMagnitude(), value), bytes, offset); + public static void encodeDimension(double value, byte bytes[], int offset) { + NumericUtils.intToSortableBytes(Geo3DUtil.encodeValue(PlanetModel.WGS84.getMaximumMagnitude(), value), bytes, offset); } /** Decode single dimension */ - public static double decodeDimension(PlanetModel planetModel, byte value[], int offset) { - return Geo3DUtil.decodeValueCenter(planetModel.getMaximumMagnitude(), NumericUtils.sortableBytesToInt(value, offset)); + public static double decodeDimension(byte value[], int offset) { + return Geo3DUtil.decodeValueCenter(PlanetModel.WGS84.getMaximumMagnitude(), NumericUtils.sortableBytesToInt(value, offset)); } /** Returns a query matching all points inside the provided shape. * - * @param planetModel The {@link PlanetModel} to use, which must match what was used during indexing * @param field field name. must not be {@code null}. 
* @param shape Which {@link GeoShape} to match */ - public static Query newShapeQuery(PlanetModel planetModel, String field, GeoShape shape) { - return new PointInGeo3DShapeQuery(planetModel, field, shape); + public static Query newShapeQuery(String field, GeoShape shape) { + return new PointInGeo3DShapeQuery(field, shape); } @Override @@ -108,9 +103,9 @@ public String toString() { result.append(':'); BytesRef bytes = (BytesRef) fieldsData; - result.append(" x=" + decodeDimension(planetModel, bytes.bytes, bytes.offset)); - result.append(" y=" + decodeDimension(planetModel, bytes.bytes, bytes.offset + Integer.BYTES)); - result.append(" z=" + decodeDimension(planetModel, bytes.bytes, bytes.offset + 2*Integer.BYTES)); + result.append(" x=" + decodeDimension(bytes.bytes, bytes.offset)); + result.append(" y=" + decodeDimension(bytes.bytes, bytes.offset + Integer.BYTES)); + result.append(" z=" + decodeDimension(bytes.bytes, bytes.offset + 2*Integer.BYTES)); result.append('>'); return result.toString(); } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java index 4d816963080b..9e2132d680db 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java @@ -40,14 +40,19 @@ class PointInGeo3DShapeQuery extends Query { final String field; - final PlanetModel planetModel; final GeoShape shape; /** The lats/lons must be clockwise or counter-clockwise. 
*/ - public PointInGeo3DShapeQuery(PlanetModel planetModel, String field, GeoShape shape) { + public PointInGeo3DShapeQuery(String field, GeoShape shape) { this.field = field; - this.planetModel = planetModel; this.shape = shape; + + if (shape instanceof BasePlanetObject) { + BasePlanetObject planetObject = (BasePlanetObject) shape; + if (planetObject.getPlanetModel().equals(PlanetModel.WGS84) == false) { + throw new IllegalArgumentException("this query requires PlanetModel.WGS84, but got: " + planetObject.getPlanetModel()); + } + } } @Override @@ -88,7 +93,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { assert xyzSolid.getRelationship(shape) == GeoArea.WITHIN || xyzSolid.getRelationship(shape) == GeoArea.OVERLAPS: "expected WITHIN (1) or OVERLAPS (2) but got " + xyzSolid.getRelationship(shape) + "; shape="+shape+"; XYZSolid="+xyzSolid; */ - double planetMax = planetModel.getMaximumMagnitude(); + double planetMax = PlanetModel.WGS84.getMaximumMagnitude(); DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc()); @@ -103,9 +108,9 @@ public void visit(int docID) { @Override public void visit(int docID, byte[] packedValue) { assert packedValue.length == 12; - double x = Geo3DPoint.decodeDimension(planetModel, packedValue, 0); - double y = Geo3DPoint.decodeDimension(planetModel, packedValue, Integer.BYTES); - double z = Geo3DPoint.decodeDimension(planetModel, packedValue, 2 * Integer.BYTES); + double x = Geo3DPoint.decodeDimension(packedValue, 0); + double y = Geo3DPoint.decodeDimension(packedValue, Integer.BYTES); + double z = Geo3DPoint.decodeDimension(packedValue, 2 * Integer.BYTES); if (shape.isWithin(x, y, z)) { result.add(docID); } @@ -129,7 +134,7 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { assert yMin <= yMax; assert zMin <= zMax; - GeoArea xyzSolid = GeoAreaFactory.makeGeoArea(planetModel, xMin,
xMax, yMin, yMax, zMin, zMax); switch(xyzSolid.getRelationship(shape)) { case GeoArea.CONTAINS: @@ -165,10 +170,6 @@ public String getField() { return field; } - public PlanetModel getPlanetModel() { - return planetModel; - } - public GeoShape getShape() { return shape; } @@ -182,13 +183,12 @@ public boolean equals(Object o) { PointInGeo3DShapeQuery that = (PointInGeo3DShapeQuery) o; - return planetModel.equals(that.planetModel) && shape.equals(that.shape); + return shape.equals(that.shape); } @Override public final int hashCode() { int result = super.hashCode(); - result = 31 * result + planetModel.hashCode(); result = 31 * result + shape.hashCode(); return result; } @@ -203,8 +203,6 @@ public String toString(String field) { sb.append(this.field); sb.append(':'); } - sb.append(" PlanetModel: "); - sb.append(planetModel); sb.append(" Shape: "); sb.append(shape); return sb.toString(); diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java index 9d00d3e6ccb4..17a40755d2fc 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java @@ -106,13 +106,12 @@ public void testBasic() throws Exception { iwc.setCodec(getCodec()); IndexWriter w = new IndexWriter(dir, iwc); Document doc = new Document(); - doc.add(new Geo3DPoint("field", PlanetModel.WGS84, toRadians(50.7345267), toRadians(-97.5303555))); + doc.add(new Geo3DPoint("field", toRadians(50.7345267), toRadians(-97.5303555))); w.addDocument(doc); IndexReader r = DirectoryReader.open(w); // We can't wrap with "exotic" readers because the query must see the BKD3DDVFormat: IndexSearcher s = newSearcher(r, false); - assertEquals(1, s.search(Geo3DPoint.newShapeQuery(PlanetModel.WGS84, - "field", + assertEquals(1, s.search(Geo3DPoint.newShapeQuery("field", GeoCircleFactory.makeGeoCircle(PlanetModel.WGS84, toRadians(50), 
toRadians(-97), Math.PI/180.)), 1).totalHits); w.close(); r.close(); @@ -640,8 +639,6 @@ private static GeoShape randomShape(PlanetModel planetModel) { private static void verify(double[] lats, double[] lons) throws Exception { IndexWriterConfig iwc = newIndexWriterConfig(); - PlanetModel planetModel = getPlanetModel(); - // Else we can get O(N^2) merging: int mbd = iwc.getMaxBufferedDocs(); if (mbd != -1 && mbd < lats.length/100) { @@ -662,7 +659,7 @@ private static void verify(double[] lats, double[] lons) throws Exception { doc.add(newStringField("id", ""+id, Field.Store.NO)); doc.add(new NumericDocValuesField("id", id)); if (Double.isNaN(lats[id]) == false) { - doc.add(new Geo3DPoint("point", planetModel, lats[id], lons[id])); + doc.add(new Geo3DPoint("point", lats[id], lons[id])); } w.addDocument(doc); if (id > 0 && random().nextInt(100) == 42) { @@ -710,13 +707,13 @@ private void _run() throws Exception { for (int iter=0;iter", point.toString()); + Geo3DPoint point = new Geo3DPoint("point", toRadians(44.244272), toRadians(7.769736)); + assertEquals("Geo3DPoint ", point.toString()); } public void testShapeQueryToString() { - assertEquals("PointInGeo3DShapeQuery: field=point: PlanetModel: PlanetModel.SPHERE Shape: GeoStandardCircle: {planetmodel=PlanetModel.SPHERE, center=[lat=0.3861041107739683, lon=0.06780373760536706], radius=0.1(5.729577951308232)}", - Geo3DPoint.newShapeQuery(PlanetModel.SPHERE, "point", GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, toRadians(44.244272), toRadians(7.769736), 0.1)).toString()); + assertEquals("PointInGeo3DShapeQuery: field=point: Shape: GeoStandardCircle: {planetmodel=PlanetModel.WGS84, center=[lat=0.3861041107739683, lon=0.06780373760536706], radius=0.1(5.729577951308232)}", + Geo3DPoint.newShapeQuery("point", GeoCircleFactory.makeGeoCircle(PlanetModel.WGS84, toRadians(44.244272), toRadians(7.769736), 0.1)).toString()); } private static Directory getDirectory() { From dd04b6173955d55348b3abaec4c2a3e875e12487 Mon Sep 
17 00:00:00 2001 From: Shalin Shekhar Mangar Date: Mon, 7 Mar 2016 15:03:03 +0530 Subject: [PATCH 0048/1113] SOLR-8745: Deprecate costly ZkStateReader.updateClusterState(), replace with a narrow forceUpdateCollection(collection) (cherry picked from commit 093a8ce) --- solr/CHANGES.txt | 3 ++ .../hadoop/MorphlineGoLiveMiniMRTest.java | 1 - .../apache/solr/cloud/ElectionContext.java | 2 +- .../cloud/LeaderInitiatedRecoveryThread.java | 6 --- .../OverseerCollectionMessageHandler.java | 1 - .../org/apache/solr/cloud/ZkController.java | 2 +- .../solr/handler/CdcrRequestHandler.java | 2 +- .../solr/handler/admin/ClusterStatus.java | 3 -- .../handler/admin/CollectionsHandler.java | 2 - .../handler/admin/CoreAdminOperation.java | 6 +-- .../solr/handler/admin/RebalanceLeaders.java | 2 +- .../solr/cloud/BaseCdcrDistributedZkTest.java | 1 - .../solr/cloud/BasicDistributedZkTest.java | 4 +- .../cloud/ChaosMonkeyNothingIsSafeTest.java | 2 +- .../solr/cloud/ChaosMonkeyShardSplitTest.java | 2 +- .../solr/cloud/CollectionReloadTest.java | 2 +- .../cloud/CollectionTooManyReplicasTest.java | 6 +-- .../CollectionsAPIDistributedZkTest.java | 9 ++-- .../solr/cloud/CustomCollectionTest.java | 1 - .../apache/solr/cloud/DeleteShardTest.java | 2 - .../apache/solr/cloud/ForceLeaderTest.java | 11 ++--- .../apache/solr/cloud/HttpPartitionTest.java | 9 +--- .../LeaderFailoverAfterPartitionTest.java | 2 - .../LeaderInitiatedRecoveryOnCommitTest.java | 4 +- .../solr/cloud/MigrateRouteKeyTest.java | 4 +- .../org/apache/solr/cloud/OverseerTest.java | 12 ++--- .../solr/cloud/ReplicaPropertiesBase.java | 3 -- .../org/apache/solr/cloud/ShardSplitTest.java | 1 - .../org/apache/solr/cloud/SyncSliceTest.java | 1 - .../solr/cloud/TestCloudDeleteByQuery.java | 1 - .../apache/solr/cloud/TestCollectionAPI.java | 4 +- .../TestLeaderInitiatedRecoveryThread.java | 1 - .../solr/cloud/TestMiniSolrCloudCluster.java | 47 +++++++++---------- .../cloud/TestMiniSolrCloudClusterBase.java | 3 +- 
.../cloud/TestRandomRequestDistribution.java | 4 +- .../solr/cloud/TestRebalanceLeaders.java | 1 - .../solr/cloud/TestReplicaProperties.java | 1 - .../cloud/TestSolrCloudWithKerberosAlt.java | 1 + .../solr/cloud/UnloadDistributedZkTest.java | 4 +- .../apache/solr/cloud/ZkControllerTest.java | 2 +- .../solr/cloud/hdfs/StressHdfsTest.java | 3 +- .../cloud/overseer/ZkStateReaderTest.java | 6 +-- .../cloud/overseer/ZkStateWriterTest.java | 10 ++-- .../solr/common/cloud/ZkStateReader.java | 46 ++++++++++++++++++ .../solr/cloud/AbstractDistribZkTestBase.java | 4 +- .../cloud/AbstractFullDistribZkTestBase.java | 10 ++-- .../org/apache/solr/cloud/ChaosMonkey.java | 14 +----- 47 files changed, 129 insertions(+), 139 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 406776fe936f..d7ae2269e242 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -296,6 +296,9 @@ Optimizations * SOLR-8720: ZkController#publishAndWaitForDownStates should use #publishNodeAsDown. (Mark Miller) +* SOLR-8745: Deprecate costly ZkStateReader.updateClusterState(), replace with a narrow + forceUpdateCollection(collection) (Scott Blum via shalin) + Other Changes ---------------------- diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java index 1cc1723db9bd..95ed9b2b17d1 100644 --- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java +++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java @@ -646,7 +646,6 @@ public void test() throws Exception { } Thread.sleep(200); - cloudClient.getZkStateReader().updateClusterState(); } if (TEST_NIGHTLY) { diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java index 210787757bbc..38f6083bcb62 100644 --- 
a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java +++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java @@ -462,7 +462,7 @@ void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStart) throws Kee public void publishActiveIfRegisteredAndNotActive(SolrCore core) throws KeeperException, InterruptedException { if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) { ZkStateReader zkStateReader = zkController.getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collection); ClusterState clusterState = zkStateReader.getClusterState(); Replica rep = (clusterState == null) ? null : clusterState.getReplica(collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP)); diff --git a/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java b/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java index 7a72a6782ee8..589ed83e8330 100644 --- a/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java +++ b/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java @@ -244,12 +244,6 @@ protected void sendRecoveryCommandWithRetry() throws Exception { // see if the replica's node is still live, if not, no need to keep doing this loop ZkStateReader zkStateReader = zkController.getZkStateReader(); - try { - zkStateReader.updateClusterState(); - } catch (Exception exc) { - log.warn("Error when updating cluster state: "+exc); - } - if (!zkStateReader.getClusterState().liveNodesContain(replicaNodeName)) { log.warn("Node "+replicaNodeName+" hosting core "+coreNeedingRecovery+ " is no longer live. 
No need to keep trying to tell it to recover!"); diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java index 6b7f6067d8b1..d7d894bc69b7 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java @@ -1371,7 +1371,6 @@ private void waitForNewShard(String collectionName, String sliceName) throws Kee return; } Thread.sleep(1000); - zkStateReader.updateClusterState(); } throw new SolrException(ErrorCode.SERVER_ERROR, "Could not find new slice " + sliceName + " in collection " + collectionName diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index 7d2752a75aee..81897b717e92 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -883,7 +883,7 @@ public String register(String coreName, final CoreDescriptor desc, boolean recov } // make sure we have an update cluster state right away - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collection); return shardId; } finally { MDCLoggingContext.clear(); diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java index 585c8396d239..23e4abac304b 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java @@ -361,7 +361,7 @@ private void handleCollectionCheckpointAction(SolrQueryRequest req, SolrQueryRes throws IOException, SolrServerException { ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController(); try { - zkController.getZkStateReader().updateClusterState(); + 
zkController.getZkStateReader().forceUpdateCollection(collection); } catch (Exception e) { log.warn("Error when updating cluster state", e); } diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java index 667d9fa11f53..ff60adc465b3 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java @@ -57,9 +57,6 @@ public ClusterStatus(ZkStateReader zkStateReader, ZkNodeProps props) { @SuppressWarnings("unchecked") public void getClusterStatus(NamedList results) throws KeeperException, InterruptedException { - zkStateReader.updateClusterState(); - - // read aliases Aliases aliases = zkStateReader.getAliases(); Map> collectionVsAliases = new HashMap<>(); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java index de2104f4d074..593dac81bcc6 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java @@ -920,8 +920,6 @@ private static void waitForActiveCollection(String collectionName, ZkNodeProps m + (checkLeaderOnly ? 
"leaders" : "replicas")); ZkStateReader zkStateReader = cc.getZkController().getZkStateReader(); for (int i = 0; i < numRetries; i++) { - - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); Collection shards = clusterState.getSlices(collectionName); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java index 8240189cf040..e755b82ff49f 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java @@ -461,6 +461,7 @@ public void call(CallInfo callInfo) throws InterruptedException, IOException, Ke // to accept updates CloudDescriptor cloudDescriptor = core.getCoreDescriptor() .getCloudDescriptor(); + String collection = cloudDescriptor.getCollectionName(); if (retry % 15 == 0) { if (retry > 0 && log.isInfoEnabled()) @@ -470,7 +471,7 @@ public void call(CallInfo callInfo) throws InterruptedException, IOException, Ke waitForState + "; forcing ClusterState update from ZooKeeper"); // force a cluster state update - coreContainer.getZkController().getZkStateReader().updateClusterState(); + coreContainer.getZkController().getZkStateReader().forceUpdateCollection(collection); } if (maxTries == 0) { @@ -483,7 +484,6 @@ public void call(CallInfo callInfo) throws InterruptedException, IOException, Ke } ClusterState clusterState = coreContainer.getZkController().getClusterState(); - String collection = cloudDescriptor.getCollectionName(); Slice slice = clusterState.getSlice(collection, cloudDescriptor.getShardId()); if (slice != null) { final Replica replica = slice.getReplicasMap().get(coreNodeName); @@ -937,4 +937,4 @@ long getIndexSize(SolrCore core) { return size; } -} \ No newline at end of file +} diff --git a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java 
b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java index 4626fc929674..98e796da73da 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java @@ -79,7 +79,7 @@ void execute() throws KeeperException, InterruptedException { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, String.format(Locale.ROOT, "The " + COLLECTION_PROP + " is required for the Rebalance Leaders command.")); } - coreContainer.getZkController().getZkStateReader().updateClusterState(); + coreContainer.getZkController().getZkStateReader().forceUpdateCollection(collectionName); ClusterState clusterState = coreContainer.getZkController().getClusterState(); DocCollection dc = clusterState.getCollection(collectionName); if (dc == null) { diff --git a/solr/core/src/test/org/apache/solr/cloud/BaseCdcrDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BaseCdcrDistributedZkTest.java index f1f3e9167b13..fe94309bba26 100644 --- a/solr/core/src/test/org/apache/solr/cloud/BaseCdcrDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/BaseCdcrDistributedZkTest.java @@ -635,7 +635,6 @@ protected void updateMappingsFromZk(String collection) throws Exception { try { cloudClient.connect(); ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); DocCollection coll = clusterState.getCollection(collection); diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java index d25ce6648097..8222e91677f4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java @@ -552,7 +552,7 @@ private void testStopAndStartCoresInOneInstance() throws Exception { Thread.sleep(5000); 
ChaosMonkey.start(cloudJettys.get(0).jetty); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection("multiunload2"); try { cloudClient.getZkStateReader().getLeaderRetry("multiunload2", "shard1", 30000); } catch (SolrException e) { @@ -830,7 +830,7 @@ private void testANewCollectionInOneInstanceWithManualShardAssignement() throws // we added a role of none on these creates - check for it ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(oneInstanceCollection2); Map slices = zkStateReader.getClusterState().getSlicesMap(oneInstanceCollection2); assertNotNull(slices); String roles = slices.get("slice1").getReplicasMap().values().iterator().next().getStr(ZkStateReader.ROLES_PROP); diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java index 8cc80d9c8433..7dceada16687 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java @@ -205,7 +205,7 @@ public void test() throws Exception { // TODO: assert we didnt kill everyone - zkStateReader.updateClusterState(); + zkStateReader.updateLiveNodes(); assertTrue(zkStateReader.getClusterState().getLiveNodes().size() > 0); diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java index 7a44561d38b8..190db573a504 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java @@ -206,7 +206,7 @@ private void waitTillRecovered() throws Exception { for (int i = 0; i < 30; i++) { Thread.sleep(3000); ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - 
zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection("collection1"); ClusterState clusterState = zkStateReader.getClusterState(); DocCollection collection1 = clusterState.getCollection("collection1"); Slice slice = collection1.getSlice("shard1"); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionReloadTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionReloadTest.java index b6eb5e2a494f..65ff78bf06a1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionReloadTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionReloadTest.java @@ -103,7 +103,7 @@ public void testReloadedLeaderStateAfterZkSessionLoss() throws Exception { timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutSecs, TimeUnit.SECONDS); while (System.nanoTime() < timeout) { // state of leader should be active after session loss recovery - see SOLR-7338 - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); ClusterState cs = cloudClient.getZkStateReader().getClusterState(); Slice slice = cs.getSlice(testCollectionName, shardId); replicaState = slice.getReplica(leader.getName()).getStr(ZkStateReader.STATE_PROP); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java index 92fea45e3353..afc7c483fb08 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionTooManyReplicasTest.java @@ -97,7 +97,7 @@ public void testAddTooManyReplicas() throws Exception { assertEquals(0, response.getStatus()); ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); Slice slice = zkStateReader.getClusterState().getSlicesMap(collectionName).get("shard1"); Replica 
rep = null; @@ -194,7 +194,7 @@ public void testAddShard() throws Exception { // And finally, insure that there are all the replcias we expect. We should have shards 1, 2 and 4 and each // should have exactly two replicas ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); Map slices = zkStateReader.getClusterState().getSlicesMap(collectionName); assertEquals("There should be exaclty four slices", slices.size(), 4); assertNotNull("shardstart should exist", slices.get("shardstart")); @@ -275,7 +275,7 @@ public void testDownedShards() throws Exception { private List getAllNodeNames(String collectionName) throws KeeperException, InterruptedException { ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); Slice slice = zkStateReader.getClusterState().getSlicesMap(collectionName).get("shard1"); List nodes = new ArrayList<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java index 93f82acf20db..641dadfc236f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java @@ -368,7 +368,6 @@ private void deleteCollectionWithDownNodes() throws Exception { } Thread.sleep(200); - cloudClient.getZkStateReader().updateClusterState(); } assertFalse("Still found collection that should be gone", cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2")); @@ -540,8 +539,6 @@ private void testErrorHandling() throws Exception { } private void testNoCollectionSpecified() throws Exception { - - cloudClient.getZkStateReader().updateClusterState(); 
assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection")); assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection2")); @@ -565,13 +562,13 @@ private void testNoCollectionSpecified() throws Exception { makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd); // in both cases, the collection should have default to the core name - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection"); + cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection2"); assertTrue(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection")); assertTrue(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection2")); } private void testNoConfigSetExist() throws Exception { - cloudClient.getZkStateReader().updateClusterState(); assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("corewithnocollection3")); // try and create a SolrCore with no collection name @@ -592,7 +589,7 @@ private void testNoConfigSetExist() throws Exception { assertTrue(gotExp); TimeUnit.MILLISECONDS.sleep(200); // in both cases, the collection should have default to the core name - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection("corewithnocollection3"); Collection slices = cloudClient.getZkStateReader().getClusterState().getActiveSlices("corewithnocollection3"); int replicaCount = 0; diff --git a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java index 081e96f4f081..0951b5d6fc02 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java @@ -409,7 +409,6 @@ private void testCreateShardRepFactor() throws Exception { int 
attempts = 0; while (true) { if (attempts > 30) fail("Not enough active replicas in the shard 'x'"); - zkStateReader.updateClusterState(); attempts++; replicaCount = zkStateReader.getClusterState().getSlice(collectionName, "x").getReplicas().size(); if (replicaCount >= 1) break; diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java index 101bfb98c20a..812fbe932187 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java @@ -96,7 +96,6 @@ protected void confirmShardDeletion(String shard) throws SolrServerException, Ke ClusterState clusterState = zkStateReader.getClusterState(); int counter = 10; while (counter-- > 0) { - zkStateReader.updateClusterState(); clusterState = zkStateReader.getClusterState(); if (clusterState.getSlice("collection1", shard) == null) { break; @@ -142,7 +141,6 @@ protected void setSliceState(String slice, State state) throws SolrServerExcepti boolean transition = false; for (int counter = 10; counter > 0; counter--) { - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); State sliceState = clusterState.getSlice("collection1", slice).getState(); if (sliceState == state) { diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index c68fe9c3a265..a71c3e614131 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -89,7 +89,7 @@ public void testReplicasInLIRNoLeader() throws Exception { putNonLeadersIntoLIR(testCollectionName, SHARD1, zkController, leader, notLeaders); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); ClusterState clusterState = 
cloudClient.getZkStateReader().getClusterState(); int numActiveReplicas = getNumberOfActiveReplicas(clusterState, testCollectionName, SHARD1); assertEquals("Expected only 0 active replica but found " + numActiveReplicas + @@ -114,7 +114,7 @@ public void testReplicasInLIRNoLeader() throws Exception { // By now we have an active leader. Wait for recoveries to begin waitForRecoveriesToFinish(testCollectionName, cloudClient.getZkStateReader(), true); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); clusterState = cloudClient.getZkStateReader().getClusterState(); log.info("After forcing leader: " + clusterState.getSlice(testCollectionName, SHARD1)); // we have a leader @@ -187,7 +187,7 @@ public void testLastPublishedStateIsActive() throws Exception { setReplicaState(testCollectionName, SHARD1, rep, State.DOWN); } - zkController.getZkStateReader().updateClusterState(); + zkController.getZkStateReader().forceUpdateCollection(testCollectionName); // Assert all replicas are down and that there is no leader assertEquals(0, getActiveOrRecoveringReplicas(testCollectionName, SHARD1).size()); @@ -224,7 +224,6 @@ protected void unsetLeader(String collection, String slice) throws Exception { ClusterState clusterState = null; boolean transition = false; for (int counter = 10; counter > 0; counter--) { - zkStateReader.updateClusterState(); clusterState = zkStateReader.getClusterState(); Replica newLeader = clusterState.getSlice(collection, slice).getLeader(); if (newLeader == null) { @@ -259,7 +258,6 @@ protected void setReplicaState(String collection, String slice, Replica replica, Replica.State replicaState = null; for (int counter = 10; counter > 0; counter--) { - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); replicaState = clusterState.getSlice(collection, slice).getReplica(replica.getName()).getState(); if (replicaState == state) { @@ -355,7 
+353,6 @@ void putNonLeadersIntoLIR(String collectionName, String shard, ZkController zkCo for (int j = 0; j < notLeaders.size(); j++) lirStates[j] = zkController.getLeaderInitiatedRecoveryState(collectionName, shard, notLeaders.get(j).getName()); - zkController.getZkStateReader().updateClusterState(); ClusterState clusterState = zkController.getZkStateReader().getClusterState(); boolean allDown = true; for (State lirState : lirStates) @@ -391,7 +388,7 @@ protected void bringBackOldLeaderAndSendDoc(String collection, Replica leader, L JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader)); leaderJetty.start(); waitForRecoveriesToFinish(collection, cloudClient.getZkStateReader(), true); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(collection); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); log.info("After bringing back leader: " + clusterState.getSlice(collection, SHARD1)); int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1); diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java index 8fecc84045ad..f1960aa952ea 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java @@ -215,7 +215,7 @@ protected void testMinRf() throws Exception { // Verify that the partitioned replica is DOWN ZkStateReader zkr = cloudClient.getZkStateReader(); - zkr.updateClusterState(); // force the state to be fresh + zkr.forceUpdateCollection(testCollectionName);; // force the state to be fresh ClusterState cs = zkr.getClusterState(); Collection slices = cs.getActiveSlices(testCollectionName); Slice slice = slices.iterator().next(); @@ -645,18 +645,13 @@ protected void waitToSeeReplicasActive(String testCollectionName, String shardId final RTimer timer = new RTimer(); 
ZkStateReader zkr = cloudClient.getZkStateReader(); - zkr.updateClusterState(); // force the state to be fresh - + zkr.forceUpdateCollection(testCollectionName); ClusterState cs = zkr.getClusterState(); Collection slices = cs.getActiveSlices(testCollectionName); boolean allReplicasUp = false; long waitMs = 0L; long maxWaitMs = maxWaitSecs * 1000L; while (waitMs < maxWaitMs && !allReplicasUp) { - // refresh state every 2 secs - if (waitMs % 2000 == 0) - cloudClient.getZkStateReader().updateClusterState(); - cs = cloudClient.getZkStateReader().getClusterState(); assertNotNull(cs); Slice shard = cs.getSlice(testCollectionName, shardId); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java index 6fd7c534809f..0436d5e874b3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java @@ -159,8 +159,6 @@ protected void testRf3WithLeaderFailover() throws Exception { long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); while (System.nanoTime() < timeout) { - cloudClient.getZkStateReader().updateClusterState(); - List activeReps = getActiveOrRecoveringReplicas(testCollectionName, "shard1"); if (activeReps.size() >= 2) break; Thread.sleep(1000); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java index 8d2cc70c786a..7d6c633f4827 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java @@ -80,7 +80,7 @@ private void multiShardTest() throws Exception { Thread.sleep(sleepMsBeforeHealPartition); - cloudClient.getZkStateReader().updateClusterState(); // get the latest 
state + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); // get the latest state leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); @@ -128,7 +128,7 @@ private void oneShardTest() throws Exception { sendCommitWithRetry(replica); Thread.sleep(sleepMsBeforeHealPartition); - cloudClient.getZkStateReader().updateClusterState(); // get the latest state + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); // get the latest state leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java index f9566e30f088..c09e0d1dc6a0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java @@ -72,7 +72,7 @@ private boolean waitForRuleToExpire(String splitKey, long finishTime) throws Kee boolean ruleRemoved = false; long expiryTime = finishTime + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); while (System.nanoTime() < expiryTime) { - getCommonCloudSolrClient().getZkStateReader().updateClusterState(); + getCommonCloudSolrClient().getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); state = getCommonCloudSolrClient().getZkStateReader().getClusterState(); slice = state.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD2); Map routingRules = slice.getRoutingRules(); @@ -186,7 +186,7 @@ protected void multipleShardMigrateTest() throws Exception { log.info("Response from target collection: " + response); assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound()); - 
getCommonCloudSolrClient().getZkStateReader().updateClusterState(); + getCommonCloudSolrClient().getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); ClusterState state = getCommonCloudSolrClient().getZkStateReader().getClusterState(); Slice slice = state.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD2); assertNotNull("Routing rule map is null", slice.getRoutingRules()); diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index 66a214f7fcbf..85a88ec3ae9d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -439,7 +439,6 @@ public void testShardAssignmentBigger() throws Exception { int cloudStateSliceCount = 0; for (int i = 0; i < 40; i++) { cloudStateSliceCount = 0; - reader.updateClusterState(); ClusterState state = reader.getClusterState(); final Map slices = state.getSlicesMap(collection); if (slices != null) { @@ -524,7 +523,6 @@ public void testShardAssignmentBigger() throws Exception { private void waitForCollections(ZkStateReader stateReader, String... 
collections) throws InterruptedException, KeeperException { int maxIterations = 100; while (0 < maxIterations--) { - stateReader.updateClusterState(); final ClusterState state = stateReader.getClusterState(); Set availableCollections = state.getCollections(); int availableCount = 0; @@ -605,7 +603,6 @@ public void testStateChange() throws Exception { private void verifyShardLeader(ZkStateReader reader, String collection, String shard, String expectedCore) throws InterruptedException, KeeperException { int maxIterations = 200; while(maxIterations-->0) { - reader.updateClusterState(); // poll state ZkNodeProps props = reader.getClusterState().getLeader(collection, shard); if(props!=null) { if(expectedCore.equals(props.getStr(ZkStateReader.CORE_NAME_PROP))) { @@ -832,7 +829,8 @@ public void testShardLeaderChange() throws Exception { killerThread = new Thread(killer); killerThread.start(); - reader = new ZkStateReader(controllerClient); //no watches, we'll poll + reader = new ZkStateReader(controllerClient); + reader.createClusterStateWatchersAndUpdate(); for (int i = 0; i < atLeast(4); i++) { killCounter.incrementAndGet(); //for each round allow 1 kill @@ -905,9 +903,10 @@ public void testDoubleAssignment() throws Exception { mockController = new MockZKController(server.getZkAddress(), "node1"); mockController.publishState(collection, "core1", "core_node1", Replica.State.RECOVERING, 1); - while (version == getClusterStateVersion(controllerClient)); + while (version == reader.getClusterState().getZkClusterStateVersion()) { + Thread.sleep(100); + } - reader.updateClusterState(); ClusterState state = reader.getClusterState(); int numFound = 0; @@ -1048,7 +1047,6 @@ public void testPerformance() throws Exception { assertTrue(overseers.size() > 0); while (true) { - reader.updateClusterState(); ClusterState state = reader.getClusterState(); if (state.hasCollection("perf_sentinel")) { break; diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java 
b/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java index 8347af093722..fe83a8431a3d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java @@ -56,7 +56,6 @@ public static void verifyPropertyNotPresent(CloudSolrClient client, String colle ClusterState clusterState = null; Replica replica = null; for (int idx = 0; idx < 300; ++idx) { - client.getZkStateReader().updateClusterState(); clusterState = client.getZkStateReader().getClusterState(); replica = clusterState.getReplica(collectionName, replicaName); if (replica == null) { @@ -82,7 +81,6 @@ public static void verifyPropertyVal(CloudSolrClient client, String collectionNa ClusterState clusterState = null; for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. - client.getZkStateReader().updateClusterState(); clusterState = client.getZkStateReader().getClusterState(); replica = clusterState.getReplica(collectionName, replicaName); if (replica == null) { @@ -116,7 +114,6 @@ public static void verifyUnique(CloudSolrClient client, String collectionName, S DocCollection col = null; for (int idx = 0; idx < 300; ++idx) { - client.getZkStateReader().updateClusterState(); ClusterState clusterState = client.getZkStateReader().getClusterState(); col = clusterState.getCollection(collectionName); diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java index 22735abdb251..6d4b9cc3b8f9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java @@ -416,7 +416,6 @@ protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas) th int i = 0; for (i = 0; i < 10; i++) { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - zkStateReader.updateClusterState(); clusterState = 
zkStateReader.getClusterState(); slice1_0 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, "shard1_0"); slice1_1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, "shard1_1"); diff --git a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java index e753be9f459c..362009e684bc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java @@ -218,7 +218,6 @@ private void waitTillAllNodesActive() throws Exception { for (int i = 0; i < 60; i++) { Thread.sleep(3000); ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); DocCollection collection1 = clusterState.getCollection("collection1"); Slice slice = collection1.getSlice("shard1"); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java index a0bb42a3ee61..f4436eb9e65b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java @@ -119,7 +119,6 @@ private static void createMiniSolrCloudCluster() throws Exception { String nodeKey = jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/","_"); urlMap.put(nodeKey, jettyURL.toString()); } - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); for (Slice slice : clusterState.getSlices(COLLECTION_NAME)) { String shardName = slice.getName(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java index b203f02c8770..45b6f733becf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java @@ -625,7 +625,7 @@ private void testClusterStateMigration() throws Exception { .setCollectionName("testClusterStateMigration") .process(client); - client.getZkStateReader().updateClusterState(); + client.getZkStateReader().forceUpdateCollection("testClusterStateMigration"); assertEquals(2, client.getZkStateReader().getClusterState().getCollection("testClusterStateMigration").getStateFormat()); @@ -735,7 +735,7 @@ private void testShardCreationNameValidation() throws Exception { private Map getProps(CloudSolrClient client, String collectionName, String replicaName, String... props) throws KeeperException, InterruptedException { - client.getZkStateReader().updateClusterState(); + client.getZkStateReader().forceUpdateCollection(collectionName); ClusterState clusterState = client.getZkStateReader().getClusterState(); Replica replica = clusterState.getReplica(collectionName, replicaName); if (replica == null) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java index f2c58cf808a3..11858f828b7f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java @@ -175,7 +175,6 @@ protected void updateLIRState(String replicaCoreNodeName) { timeOut = new TimeOut(30, TimeUnit.SECONDS); while (!timeOut.hasTimedOut()) { - cloudClient.getZkStateReader().updateClusterState(); Replica r = cloudClient.getZkStateReader().getClusterState().getReplica(DEFAULT_COLLECTION, replica.getName()); if (r.getState() == Replica.State.DOWN) { break; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java index 9be89190d424..880051b1f83f 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java @@ -176,7 +176,7 @@ public void testCollectionCreateSearchDelete() throws Exception { assertEquals(1, rsp.getResults().getNumFound()); // remove a server not hosting any replicas - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); ClusterState clusterState = zkStateReader.getClusterState(); HashMap jettyMap = new HashMap(); for (JettySolrRunner jetty : miniCluster.getJettySolrRunners()) { @@ -321,7 +321,8 @@ public void testCollectionCreateWithoutCoresThenDelete() throws Exception { try (SolrZkClient zkClient = new SolrZkClient (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { - + zkStateReader.createClusterStateWatchersAndUpdate(); + // wait for collection to appear AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); @@ -368,6 +369,7 @@ public void testStopAllStartAll() throws Exception { try (SolrZkClient zkClient = new SolrZkClient (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { + zkStateReader.createClusterStateWatchersAndUpdate(); AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); // modify collection @@ -385,7 +387,7 @@ public void testStopAllStartAll() throws Exception { } // the test itself - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); final ClusterState clusterState = zkStateReader.getClusterState(); final HashSet leaderIndices = new HashSet(); @@ -444,7 +446,7 @@ public void testStopAllStartAll() throws Exception { } AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, 
zkStateReader, true, true, 330); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); // re-query collection { @@ -489,32 +491,29 @@ public void testSegmentTerminateEarly() throws Exception { } } - try (SolrZkClient zkClient = new SolrZkClient - (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null); - ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { - AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader(); + AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - // add some documents, then optimize to get merged-sorted segments - tstes.addDocuments(cloudSolrClient, 10, 10, true); + // add some documents, then optimize to get merged-sorted segments + tstes.addDocuments(cloudSolrClient, 10, 10, true); - // CommonParams.SEGMENT_TERMINATE_EARLY parameter intentionally absent - tstes.queryTimestampDescending(cloudSolrClient); + // CommonParams.SEGMENT_TERMINATE_EARLY parameter intentionally absent + tstes.queryTimestampDescending(cloudSolrClient); - // add a few more documents, but don't optimize to have some not-merge-sorted segments - tstes.addDocuments(cloudSolrClient, 2, 10, false); + // add a few more documents, but don't optimize to have some not-merge-sorted segments + tstes.addDocuments(cloudSolrClient, 2, 10, false); - // CommonParams.SEGMENT_TERMINATE_EARLY parameter now present - tstes.queryTimestampDescendingSegmentTerminateEarlyYes(cloudSolrClient); - tstes.queryTimestampDescendingSegmentTerminateEarlyNo(cloudSolrClient); + // CommonParams.SEGMENT_TERMINATE_EARLY parameter now present + tstes.queryTimestampDescendingSegmentTerminateEarlyYes(cloudSolrClient); + tstes.queryTimestampDescendingSegmentTerminateEarlyNo(cloudSolrClient); - // CommonParams.SEGMENT_TERMINATE_EARLY parameter present but it won't be used - 
tstes.queryTimestampDescendingSegmentTerminateEarlyYesGrouped(cloudSolrClient); - tstes.queryTimestampAscendingSegmentTerminateEarlyYes(cloudSolrClient); // uses a sort order that is _not_ compatible with the merge sort order + // CommonParams.SEGMENT_TERMINATE_EARLY parameter present but it won't be used + tstes.queryTimestampDescendingSegmentTerminateEarlyYesGrouped(cloudSolrClient); + tstes.queryTimestampAscendingSegmentTerminateEarlyYes(cloudSolrClient); // uses a sort order that is _not_ compatible with the merge sort order - // delete the collection we created earlier - miniCluster.deleteCollection(collectionName); - AbstractDistribZkTestBase.waitForCollectionToDisappear(collectionName, zkStateReader, true, true, 330); - } + // delete the collection we created earlier + miniCluster.deleteCollection(collectionName); + AbstractDistribZkTestBase.waitForCollectionToDisappear(collectionName, zkStateReader, true, true, 330); } finally { miniCluster.shutdown(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterBase.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterBase.java index 54b21dff5de8..18285617d9a9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterBase.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterBase.java @@ -146,6 +146,7 @@ protected void testCollectionCreateSearchDelete(String collectionName) throws Ex try (SolrZkClient zkClient = new SolrZkClient (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { + zkStateReader.createClusterStateWatchersAndUpdate(); AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); // modify/query collection @@ -160,7 +161,7 @@ protected void testCollectionCreateSearchDelete(String collectionName) throws Ex assertEquals(1, rsp.getResults().getNumFound()); // remove 
a server not hosting any replicas - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collectionName); ClusterState clusterState = zkStateReader.getClusterState(); HashMap jettyMap = new HashMap(); for (JettySolrRunner jetty : miniCluster.getJettySolrRunners()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java index 25ffe8425695..256774d08c39 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java @@ -88,7 +88,7 @@ private void testRequestTracking() throws Exception { waitForRecoveriesToFinish("a1x2", true); waitForRecoveriesToFinish("b1x1", true); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection("b1x1"); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); DocCollection b1x1 = clusterState.getCollection("b1x1"); @@ -137,7 +137,7 @@ private void testQueryAgainstDownReplica() throws Exception { waitForRecoveriesToFinish("football", true); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection("football"); Replica leader = null; Replica notLeader = null; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java index 3c720bfeddfc..9208229976a3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java @@ -310,7 +310,6 @@ boolean waitForAllPreferreds() throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS); while (! 
timeout.hasTimedOut()) { goAgain = false; - cloudClient.getZkStateReader().updateClusterState(); Map slices = cloudClient.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap(); for (Map.Entry ent : expected.entrySet()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java index 5cc15e2ba366..fc2a7e25740e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java @@ -192,7 +192,6 @@ private void verifyLeaderAssignment(CloudSolrClient client, String collectionNam String lastFailMsg = ""; for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. lastFailMsg = ""; - client.getZkStateReader().updateClusterState(); ClusterState clusterState = client.getZkStateReader().getClusterState(); for (Slice slice : clusterState.getSlices(collectionName)) { Boolean foundLeader = false; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java index 4d3ee30ad696..f4dc97de95b0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java @@ -205,6 +205,7 @@ protected void testCollectionCreateSearchDelete() throws Exception { try (SolrZkClient zkClient = new SolrZkClient (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { + zkStateReader.createClusterStateWatchersAndUpdate(); AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); // modify/query collection diff --git a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java 
b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java index dd337fb8530e..7d53feebf73c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java @@ -187,7 +187,7 @@ private void testCoreUnloadAndLeaders() throws Exception { } ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection("unloadcollection"); int slices = zkStateReader.getClusterState().getCollection("unloadcollection").getSlices().size(); assertEquals(1, slices); @@ -203,7 +203,7 @@ private void testCoreUnloadAndLeaders() throws Exception { createCmd.setDataDir(getDataDir(core2dataDir)); adminClient.request(createCmd); } - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection("unloadcollection"); slices = zkStateReader.getClusterState().getCollection("unloadcollection").getSlices().size(); assertEquals(1, slices); diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java index cffbb543e493..7b293ca5ea6e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java @@ -296,7 +296,7 @@ public List getCurrentDescriptors() { byte[] bytes = Utils.toJSON(state); zkController.getZkClient().makePath(ZkStateReader.getCollectionPath("testPublishAndWaitForDownStates"), bytes, CreateMode.PERSISTENT, true); - zkController.getZkStateReader().updateClusterState(); + zkController.getZkStateReader().forceUpdateCollection("testPublishAndWaitForDownStates"); assertTrue(zkController.getZkStateReader().getClusterState().hasCollection("testPublishAndWaitForDownStates")); assertNotNull(zkController.getZkStateReader().getClusterState().getCollection("testPublishAndWaitForDownStates")); diff --git 
a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java index 445c4b8f615d..601f4fe723ab 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java @@ -154,7 +154,7 @@ private void createAndDeleteCollection() throws SolrServerException, waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false); cloudClient.setDefaultCollection(DELETE_DATA_DIR_COLLECTION); - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(DELETE_DATA_DIR_COLLECTION); for (int i = 1; i < nShards + 1; i++) { cloudClient.getZkStateReader().getLeaderRetry(DELETE_DATA_DIR_COLLECTION, "shard" + i, 30000); @@ -211,7 +211,6 @@ private void createAndDeleteCollection() throws SolrServerException, } Thread.sleep(200); - cloudClient.getZkStateReader().updateClusterState(); } // check that all dirs are gone diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index 69626b0828b2..10cc46c51657 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -94,7 +94,7 @@ public void testStateFormatUpdate(boolean explicitRefresh, boolean isInteresting assertFalse(exists); if (explicitRefresh) { - reader.updateClusterState(); + reader.forceUpdateCollection("c1"); } else { for (int i = 0; i < 100; ++i) { if (reader.getClusterState().hasCollection("c1")) { @@ -122,7 +122,7 @@ public void testStateFormatUpdate(boolean explicitRefresh, boolean isInteresting assertTrue(exists); if (explicitRefresh) { - reader.updateClusterState(); + reader.forceUpdateCollection("c1"); } else { for (int i = 0; i < 100; ++i) { if (reader.getClusterState().getCollection("c1").getStateFormat() == 2) { 
@@ -167,7 +167,7 @@ public void testExternalCollectionWatchedNotWatched() throws Exception{ new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json")); writer.enqueueUpdate(reader.getClusterState(), c1, null); writer.writePendingUpdates(); - reader.updateClusterState(); + reader.forceUpdateCollection("c1"); assertTrue(reader.getClusterState().getCollectionRef("c1").isLazilyLoaded()); reader.addCollectionWatch("c1"); diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java index 8e7b0098121f..f5648bf148c7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java @@ -233,7 +233,8 @@ public void testExternalModificationToSharedClusterState() throws Exception { writer.enqueueUpdate(reader.getClusterState(), c1, null); writer.writePendingUpdates(); - reader.updateClusterState(); + reader.forceUpdateCollection("c1"); + reader.forceUpdateCollection("c2"); ClusterState clusterState = reader.getClusterState(); // keep a reference to the current cluster state object assertTrue(clusterState.hasCollection("c1")); assertFalse(clusterState.hasCollection("c2")); @@ -257,7 +258,6 @@ public void testExternalModificationToSharedClusterState() throws Exception { // expected } - reader.updateClusterState(); try { writer.enqueueUpdate(reader.getClusterState(), c2, null); fail("enqueueUpdate after BadVersionException should not have suceeded"); @@ -317,7 +317,7 @@ public void testExternalModificationToStateFormat2() throws Exception { zkClient.setData(ZkStateReader.getCollectionPath("c2"), data, true); // get the most up-to-date state - reader.updateClusterState(); + reader.forceUpdateCollection("c2"); state = reader.getClusterState(); assertTrue(state.hasCollection("c2")); 
assertEquals(sharedClusterStateVersion, (int) state.getZkClusterStateVersion()); @@ -328,7 +328,7 @@ public void testExternalModificationToStateFormat2() throws Exception { assertTrue(writer.hasPendingUpdates()); // get the most up-to-date state - reader.updateClusterState(); + reader.forceUpdateCollection("c2"); state = reader.getClusterState(); // enqueue a stateFormat=1 collection which should cause a flush @@ -336,7 +336,7 @@ public void testExternalModificationToStateFormat2() throws Exception { new DocCollection("c1", new HashMap(), new HashMap(), DocRouter.DEFAULT, 0, ZkStateReader.CLUSTER_STATE)); try { - state = writer.enqueueUpdate(state, c1, null); + writer.enqueueUpdate(state, c1, null); fail("Enqueue should not have succeeded"); } catch (KeeperException.BadVersionException bve) { // expected diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java index 3dbc6d2876d0..308b3e000a54 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java @@ -226,7 +226,10 @@ public ZkConfigManager getConfigManager() { /** * Forcibly refresh cluster state from ZK. Do this only to avoid race conditions because it's expensive. + * + * @deprecated Don't call this, call {@link #forceUpdateCollection(String)} on a single collection if you must. */ + @Deprecated public void updateClusterState() throws KeeperException, InterruptedException { synchronized (getUpdateLock()) { if (clusterState == null) { @@ -248,6 +251,49 @@ public void updateClusterState() throws KeeperException, InterruptedException { } } + /** + * Forcibly refresh a collection's internal state from ZK. Try to avoid having to resort to this when + * a better design is possible. 
+ */ + public void forceUpdateCollection(String collection) throws KeeperException, InterruptedException { + synchronized (getUpdateLock()) { + if (clusterState == null) { + return; + } + + ClusterState.CollectionRef ref = clusterState.getCollectionRef(collection); + if (ref == null) { + // We don't know anything about this collection, maybe it's new? + // First try to update the legacy cluster state. + refreshLegacyClusterState(null); + if (!legacyCollectionStates.containsKey(collection)) { + // No dice, see if a new collection just got created. + LazyCollectionRef tryLazyCollection = new LazyCollectionRef(collection); + if (tryLazyCollection.get() == null) { + // No dice, just give up. + return; + } + // What do you know, it exists! + lazyCollectionStates.putIfAbsent(collection, tryLazyCollection); + } + } else if (ref.isLazilyLoaded()) { + if (ref.get() != null) { + return; + } + // Edge case: if there's no external collection, try refreshing legacy cluster state in case it's there. + refreshLegacyClusterState(null); + } else if (legacyCollectionStates.containsKey(collection)) { + // Exists, and lives in legacy cluster state, force a refresh. + refreshLegacyClusterState(null); + } else if (watchedCollectionStates.containsKey(collection)) { + // Exists as a watched collection, force a refresh. + DocCollection newState = fetchCollectionState(collection, null); + updateWatchedCollection(collection, newState); + } + constructState(); + } + } + /** Refresh the set of live nodes. 
*/ public void updateLiveNodes() throws KeeperException, InterruptedException { refreshLiveNodes(null); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java index ff4238260803..7b3617ba86c2 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java @@ -145,7 +145,6 @@ public static void waitForRecoveriesToFinish(String collection, while (cont) { if (verbose) System.out.println("-"); boolean sawLiveRecovering = false; - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); Map slices = clusterState.getSlicesMap(collection); assertNotNull("Could not find collection:" + collection, slices); @@ -195,7 +194,6 @@ public static void waitForCollectionToDisappear(String collection, while (cont) { if (verbose) System.out.println("-"); - zkStateReader.updateClusterState(); ClusterState clusterState = zkStateReader.getClusterState(); if (!clusterState.hasCollection(collection)) break; if (cnt == timeoutSeconds) { @@ -239,7 +237,7 @@ public static void verifyReplicaStatus(ZkStateReader reader, String collection, protected void assertAllActive(String collection,ZkStateReader zkStateReader) throws KeeperException, InterruptedException { - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collection); ClusterState clusterState = zkStateReader.getClusterState(); Map slices = clusterState.getSlicesMap(collection); if (slices == null) { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java index bf8f643656ba..a584dbd450bc 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java +++ 
b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java @@ -626,7 +626,7 @@ protected void updateMappingsFromZk(List jettys, List jettys, List clients, boolean allowOverSharding) throws Exception { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION); cloudJettys.clear(); shardToJetty.clear(); @@ -1814,7 +1814,7 @@ protected List ensureAllReplicasAreActive(String testCollectionName, St Map notLeaders = new HashMap<>(); ZkStateReader zkr = cloudClient.getZkStateReader(); - zkr.updateClusterState(); // force the state to be fresh + zkr.forceUpdateCollection(testCollectionName); // force the state to be fresh ClusterState cs = zkr.getClusterState(); Collection slices = cs.getActiveSlices(testCollectionName); @@ -1824,10 +1824,6 @@ protected List ensureAllReplicasAreActive(String testCollectionName, St long maxWaitMs = maxWaitSecs * 1000L; Replica leader = null; while (waitMs < maxWaitMs && !allReplicasUp) { - // refresh state every 2 secs - if (waitMs % 2000 == 0) - cloudClient.getZkStateReader().updateClusterState(); - cs = cloudClient.getZkStateReader().getClusterState(); assertNotNull(cs); Slice shard = cs.getSlice(testCollectionName, shardId); @@ -1879,7 +1875,7 @@ protected String printClusterStateInfo() throws Exception { } protected String printClusterStateInfo(String collection) throws Exception { - cloudClient.getZkStateReader().updateClusterState(); + cloudClient.getZkStateReader().forceUpdateCollection(collection); String cs = null; ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); if (collection != null) { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java index d13d62f06835..511fdf34b157 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java +++ 
b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java @@ -425,7 +425,7 @@ private int checkIfKillIsLegal(String slice, int numActive) throws KeeperExcepti for (CloudJettyRunner cloudJetty : shardToJetty.get(slice)) { // get latest cloud state - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(collection); Slice theShards = zkStateReader.getClusterState().getSlicesMap(collection) .get(slice); @@ -447,18 +447,6 @@ private int checkIfKillIsLegal(String slice, int numActive) throws KeeperExcepti return numActive; } - public SolrClient getRandomClient(String slice) throws KeeperException, InterruptedException { - // get latest cloud state - zkStateReader.updateClusterState(); - - // get random shard - List clients = shardToClient.get(slice); - int index = LuceneTestCase.random().nextInt(clients.size() - 1); - SolrClient client = clients.get(index); - - return client; - } - // synchronously starts and stops shards randomly, unless there is only one // active shard up for a slice or if there is one active and others recovering public void startTheMonkey(boolean killLeaders, final int roundPauseUpperLimit) { From 5429356fc4ba021d2afe7766b832ca0dc93a0d57 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Mon, 7 Mar 2016 08:38:38 -0500 Subject: [PATCH 0049/1113] make test less evil --- .../src/test/org/apache/lucene/search/TestPointQueries.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java index 5a3483bc30fc..500bb8fd25f4 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java @@ -351,12 +351,12 @@ public void testRandomLongsMedium() throws Exception { @Nightly public void testRandomLongsBig() throws Exception { - doTestRandomLongs(200000); + doTestRandomLongs(100000); } 
private void doTestRandomLongs(int count) throws Exception { - int numValues = atLeast(count); + int numValues = TestUtil.nextInt(random(), count, count*2); if (VERBOSE) { System.out.println("TEST: numValues=" + numValues); From 5146e78a6445c1d1578154678518c8c904bf21e0 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Mon, 7 Mar 2016 22:44:36 +0530 Subject: [PATCH 0050/1113] SOLR-8736: schema GET operations on fields, dynamicFields, fieldTypes, copyField are reimplemented as a part of the bulk API with less details. The tests and write implementations are removed --- solr/CHANGES.txt | 5 + .../apache/solr/handler/SchemaHandler.java | 78 +++- .../apache/solr/rest/SolrSchemaRestApi.java | 56 +-- .../solr/rest/schema/BaseFieldResource.java | 146 ------- .../rest/schema/BaseFieldTypeResource.java | 98 ----- .../schema/CopyFieldCollectionResource.java | 198 ---------- .../DynamicFieldCollectionResource.java | 207 ---------- .../rest/schema/DynamicFieldResource.java | 197 ---------- .../rest/schema/FieldCollectionResource.java | 225 ----------- .../solr/rest/schema/FieldResource.java | 201 ---------- .../schema/FieldTypeCollectionResource.java | 197 ---------- .../solr/rest/schema/FieldTypeResource.java | 203 ---------- .../org/apache/solr/schema/IndexSchema.java | 11 +- .../org/apache/solr/servlet/HttpSolrCall.java | 3 + .../rest/schema/TestClassNameShortening.java | 3 +- .../TestCopyFieldCollectionResource.java | 96 +---- .../TestDynamicFieldCollectionResource.java | 29 -- .../rest/schema/TestDynamicFieldResource.java | 7 - .../schema/TestFieldCollectionResource.java | 45 --- .../solr/rest/schema/TestFieldResource.java | 23 +- .../TestFieldTypeCollectionResource.java | 1 + .../rest/schema/TestFieldTypeResource.java | 17 +- ...TestManagedSchemaDynamicFieldResource.java | 366 ----------------- .../TestManagedSchemaFieldResource.java | 369 ------------------ .../TestManagedSchemaFieldTypeResource.java | 350 ----------------- .../TestRemoveLastDynamicCopyField.java | 80 ---- 
.../schema/TestSchemaSimilarityResource.java | 1 - .../TestManagedStopFilterFactory.java | 2 +- .../TestManagedSynonymFilterFactory.java | 6 +- .../TestCloudManagedSchemaConcurrent.java | 3 +- .../solr/client/solrj/request/SchemaTest.java | 4 - 31 files changed, 125 insertions(+), 3102 deletions(-) delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/BaseFieldResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/BaseFieldTypeResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/CopyFieldCollectionResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldCollectionResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/FieldCollectionResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/FieldResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/FieldTypeCollectionResource.java delete mode 100644 solr/core/src/java/org/apache/solr/rest/schema/FieldTypeResource.java delete mode 100644 solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java delete mode 100644 solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java delete mode 100644 solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java delete mode 100644 solr/core/src/test/org/apache/solr/rest/schema/TestRemoveLastDynamicCopyField.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 645533b97e3c..14195ab7b695 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -102,6 +102,8 @@ Upgrading from Solr 5.x * When requesting stats in date fields, "sum" is now a double value instead of a date. 
See SOLR-8671 +* SOLR-8736: The deprecated GET methods for schema are now accessible and implemented differently + Detailed Change List ---------------------- @@ -394,6 +396,9 @@ Other Changes * SOLR-8423: DeleteShard and DeleteReplica should cleanup instance and data directory by default and add support for optionally retaining the directories. (Anshum Gupta) +* SOLR-8736: schema GET operations on fields, dynamicFields, fieldTypes, copyField are + reimplemented as a part of the bulk API with less details (noble) + ================== 5.5.1 ================== Bug Fixes diff --git a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java index 046de46787ed..4279864b6ba6 100644 --- a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java @@ -20,15 +20,20 @@ import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.Set; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import org.apache.solr.cloud.ZkSolrResourceLoader; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.ContentStream; -import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; +import org.apache.solr.common.util.StrUtils; import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestHandler; @@ -42,17 +47,29 @@ import org.slf4j.LoggerFactory; import static org.apache.solr.common.params.CommonParams.JSON; -import static org.apache.solr.core.ConfigSetProperties.IMMUTABLE_CONFIGSET_ARG; public class SchemaHandler extends RequestHandlerBase implements SolrCoreAware { private static final Logger log = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private boolean isImmutableConfigSet = false; - @Override - public void init(NamedList args) { - super.init(args); + private static final Map level2; + + static { + Set s = ImmutableSet.of( + IndexSchema.FIELD_TYPES, + IndexSchema.FIELDS, + IndexSchema.DYNAMIC_FIELDS, + IndexSchema.COPY_FIELDS + ); + Map m = new HashMap<>(); + for (String s1 : s) { + m.put(s1, s1); + m.put(s1.toLowerCase(Locale.ROOT), s1); + } + level2 = ImmutableMap.copyOf(m); } + @Override public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { SolrConfigHandler.setWt(req, JSON); @@ -150,6 +167,33 @@ private void handleGET(SolrQueryRequest req, SolrQueryResponse rsp) { break; } default: { + List parts = StrUtils.splitSmart(path, '/'); + if (parts.get(0).isEmpty()) parts.remove(0); + if (parts.size() > 1 && level2.containsKey(parts.get(1))) { + String realName = level2.get(parts.get(1)); + SimpleOrderedMap propertyValues = req.getSchema().getNamedPropertyValues(req.getParams()); + Object o = propertyValues.get(realName); + if(parts.size()> 2) { + String name = parts.get(2); + if (o instanceof List) { + List list = (List) o; + for (Object obj : list) { + if (obj instanceof SimpleOrderedMap) { + SimpleOrderedMap simpleOrderedMap = (SimpleOrderedMap) obj; + if(name.equals(simpleOrderedMap.get("name"))) { + rsp.add(realName.substring(0, realName.length() - 1), simpleOrderedMap); + return; + } + } + } + } + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No such path " + path); + } else { + rsp.add(realName, o); + } + return; + } + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No such path " + path); } } @@ -160,19 +204,25 @@ private void handleGET(SolrQueryRequest req, SolrQueryResponse rsp) { } private static Set subPaths = new HashSet<>(Arrays.asList( - "/version", - "/uniquekey", - "/name", - "/similarity", - "/defaultsearchfield", - "/solrqueryparser", - "/zkversion", - 
"/solrqueryparser/defaultoperator" + "version", + "uniquekey", + "name", + "similarity", + "defaultsearchfield", + "solrqueryparser", + "zkversion" )); + static { + subPaths.addAll(level2.keySet()); + } @Override public SolrRequestHandler getSubHandler(String subPath) { - if (subPaths.contains(subPath)) return this; + List parts = StrUtils.splitSmart(subPath, '/'); + if (parts.get(0).isEmpty()) parts.remove(0); + String prefix = parts.get(0); + if(subPaths.contains(prefix)) return this; + return null; } diff --git a/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java b/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java index 0e40f73c8b2d..1310198f8ca2 100644 --- a/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java +++ b/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java @@ -15,14 +15,14 @@ * limitations under the License. */ package org.apache.solr.rest; + +import java.lang.invoke.MethodHandles; +import java.util.Collections; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + import org.apache.solr.request.SolrRequestInfo; -import org.apache.solr.rest.schema.CopyFieldCollectionResource; -import org.apache.solr.rest.schema.DynamicFieldCollectionResource; -import org.apache.solr.rest.schema.DynamicFieldResource; -import org.apache.solr.rest.schema.FieldCollectionResource; -import org.apache.solr.rest.schema.FieldResource; -import org.apache.solr.rest.schema.FieldTypeCollectionResource; -import org.apache.solr.rest.schema.FieldTypeResource; import org.apache.solr.schema.IndexSchema; import org.restlet.Application; import org.restlet.Restlet; @@ -30,39 +30,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.Collections; -import java.util.HashSet; -import java.util.Locale; -import java.util.Set; - /** * Restlet servlet handling /<context>/<collection>/schema/* URL paths */ public class SolrSchemaRestApi extends Application { 
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public static final String FIELDS_PATH = "/" + IndexSchema.FIELDS; - - public static final String DYNAMIC_FIELDS = IndexSchema.DYNAMIC_FIELDS.toLowerCase(Locale.ROOT); - public static final String DYNAMIC_FIELDS_PATH = "/" + DYNAMIC_FIELDS; - - public static final String FIELDTYPES = IndexSchema.FIELD_TYPES.toLowerCase(Locale.ROOT); - public static final String FIELDTYPES_PATH = "/" + FIELDTYPES; - public static final String NAME_SEGMENT = "/{" + IndexSchema.NAME.toLowerCase(Locale.ROOT) + "}"; - - public static final String COPY_FIELDS = IndexSchema.COPY_FIELDS.toLowerCase(Locale.ROOT); - public static final String COPY_FIELDS_PATH = "/" + COPY_FIELDS; - + /** * Returns reserved endpoints under /schema */ public static Set getReservedEndpoints() { Set reservedEndpoints = new HashSet<>(); - reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + FIELDS_PATH); - reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + DYNAMIC_FIELDS_PATH); - reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + FIELDTYPES_PATH); - reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + COPY_FIELDS_PATH); return Collections.unmodifiableSet(reservedEndpoints); } @@ -88,25 +67,6 @@ public synchronized Restlet createInboundRoot() { log.info("createInboundRoot started for /schema"); - router.attach(FIELDS_PATH, FieldCollectionResource.class); - // Allow a trailing slash on collection requests - router.attach(FIELDS_PATH + "/", FieldCollectionResource.class); - router.attach(FIELDS_PATH + NAME_SEGMENT, FieldResource.class); - - router.attach(DYNAMIC_FIELDS_PATH, DynamicFieldCollectionResource.class); - // Allow a trailing slash on collection requests - router.attach(DYNAMIC_FIELDS_PATH + "/", DynamicFieldCollectionResource.class); - router.attach(DYNAMIC_FIELDS_PATH + NAME_SEGMENT, DynamicFieldResource.class); - - router.attach(FIELDTYPES_PATH, FieldTypeCollectionResource.class); - // Allow a trailing 
slash on collection requests - router.attach(FIELDTYPES_PATH + "/", FieldTypeCollectionResource.class); - router.attach(FIELDTYPES_PATH + NAME_SEGMENT, FieldTypeResource.class); - - router.attach(COPY_FIELDS_PATH, CopyFieldCollectionResource.class); - // Allow a trailing slash on collection requests - router.attach(COPY_FIELDS_PATH + "/", CopyFieldCollectionResource.class); - router.attachDefault(RestManager.ManagedEndpoint.class); // attach all the dynamically registered schema resources diff --git a/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldResource.java b/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldResource.java deleted file mode 100644 index 25f631d5e89e..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldResource.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.rest.schema; -import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.core.CoreDescriptor; -import org.apache.solr.rest.BaseSolrResource; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.restlet.resource.ResourceException; - -import java.util.LinkedHashSet; -import java.util.Map; - - -/** - * Base class for Schema Field and DynamicField requests. - */ -abstract class BaseFieldResource extends BaseSolrResource { - protected static final String INCLUDE_DYNAMIC_PARAM = "includeDynamic"; - private static final String DYNAMIC_BASE = "dynamicBase"; - - private LinkedHashSet requestedFields; - private boolean showDefaults; - - protected LinkedHashSet getRequestedFields() { - return requestedFields; - } - - - protected BaseFieldResource() { - super(); - } - - /** - * Pulls the "fl" param from the request and splits it to get the - * requested list of fields. The (Dynamic)FieldCollectionResource classes - * will then restrict the fields sent back in the response to those - * on this list. The (Dynamic)FieldResource classes ignore this list, - * since the (dynamic) field is specified in the URL path, rather than - * in a query parameter. - *

    - * Also pulls the "showDefaults" param from the request, for use by all - * subclasses to include default values from the associated field type - * in the response. By default this param is off. - */ - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - String flParam = getSolrRequest().getParams().get(CommonParams.FL); - if (null != flParam) { - String[] fields = flParam.trim().split("[,\\s]+"); - if (fields.length > 0) { - requestedFields = new LinkedHashSet<>(); - for (String field : fields) { - if ( ! field.trim().isEmpty()) { - requestedFields.add(field.trim()); - } - } - } - } - showDefaults = getSolrRequest().getParams().getBool(SHOW_DEFAULTS, false); - } - } - - /** Get the properties for a given field. - * - * @param field not required to exist in the schema - */ - protected SimpleOrderedMap getFieldProperties(SchemaField field) { - if (null == field) { - return null; - } - SimpleOrderedMap properties = field.getNamedPropertyValues(showDefaults); - if ( ! getSchema().getFields().containsKey(field.getName())) { - String dynamicBase = getSchema().getDynamicPattern(field.getName()); - // Add dynamicBase property if it's different from the field name. - if ( ! field.getName().equals(dynamicBase)) { - properties.add(DYNAMIC_BASE, dynamicBase); - } - } - if (field == getSchema().getUniqueKeyField()) { - properties.add(IndexSchema.UNIQUE_KEY, true); - } - return properties; - } - - /** - * When running in cloud mode, waits for a schema update to be - * applied by all active replicas of the current collection. 
- */ - protected void waitForSchemaUpdateToPropagate(IndexSchema newSchema) { - // If using ZooKeeper and the client application has requested an update timeout, then block until all - // active replicas for this collection process the updated schema - if (getUpdateTimeoutSecs() > 0 && newSchema != null && - newSchema.getResourceLoader() instanceof ZkSolrResourceLoader) - { - CoreDescriptor cd = getSolrCore().getCoreDescriptor(); - String collection = cd.getCollectionName(); - if (collection != null) { - ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader) newSchema.getResourceLoader(); - ManagedIndexSchema.waitForSchemaZkVersionAgreement(collection, - cd.getCloudDescriptor().getCoreNodeName(), - ((ManagedIndexSchema) newSchema).getSchemaZkVersion(), - zkLoader.getZkController(), - getUpdateTimeoutSecs()); - } - } - } - - // protected access on this class triggers a bug in javadoc generation caught by - // documentation-link: "BROKEN LINK" reported in javadoc for classes using - // NewFieldArguments because the link target file is BaseFieldResource.NewFieldArguments, - // but the actual file is BaseFieldResource$NewFieldArguments. - static class NewFieldArguments { - private String name; - private String type; - Map map; - NewFieldArguments(String name, String type, Map map) { - this.name = name; - this.type = type; - this.map = map; - } - - public String getName() { return name; } - public String getType() { return type; } - public Map getMap() { return map; } - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldTypeResource.java b/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldTypeResource.java deleted file mode 100644 index c475dd0c3238..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/BaseFieldTypeResource.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; - -import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.core.CoreDescriptor; -import org.apache.solr.rest.BaseSolrResource; -import org.apache.solr.schema.FieldType; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.restlet.resource.ResourceException; - -import java.util.List; - -/** - * Base class for the FieldType resource classes. 
- */ -abstract class BaseFieldTypeResource extends BaseSolrResource { - private boolean showDefaults; - - protected BaseFieldTypeResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - showDefaults = getSolrRequest().getParams().getBool(SHOW_DEFAULTS, false); - } - } - - /** Used by subclasses to collect field type properties */ - protected SimpleOrderedMap getFieldTypeProperties(FieldType fieldType) { - SimpleOrderedMap properties = fieldType.getNamedPropertyValues(showDefaults); - properties.add(IndexSchema.FIELDS, getFieldsWithFieldType(fieldType)); - properties.add(IndexSchema.DYNAMIC_FIELDS, getDynamicFieldsWithFieldType(fieldType)); - return properties; - } - - - /** Return a list of names of Fields that have the given FieldType */ - protected abstract List getFieldsWithFieldType(FieldType fieldType); - - /** Return a list of names of DynamicFields that have the given FieldType */ - protected abstract List getDynamicFieldsWithFieldType(FieldType fieldType); - - /** - * Adds one or more new FieldType definitions to the managed schema for the given core. 
- */ - protected void addNewFieldTypes(List newFieldTypes, ManagedIndexSchema oldSchema) { - IndexSchema newSchema = null; - boolean success = false; - while (!success) { - try { - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addFieldTypes(newFieldTypes, true); - getSolrCore().setLatestSchema(newSchema); - success = true; - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema(); - } - } - - // If using ZooKeeper and the client application has requested an update timeout, then block until all - // active replicas for this collection process the updated schema - if (getUpdateTimeoutSecs() > 0 && newSchema != null && - newSchema.getResourceLoader() instanceof ZkSolrResourceLoader) - { - CoreDescriptor cd = getSolrCore().getCoreDescriptor(); - String collection = cd.getCollectionName(); - if (collection != null) { - ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader) newSchema.getResourceLoader(); - ManagedIndexSchema.waitForSchemaZkVersionAgreement(collection, - cd.getCloudDescriptor().getCoreNodeName(), - ((ManagedIndexSchema) newSchema).getSchemaZkVersion(), - zkLoader.getZkController(), - getUpdateTimeoutSecs()); - } - } - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/CopyFieldCollectionResource.java b/solr/core/src/java/org/apache/solr/rest/schema/CopyFieldCollectionResource.java deleted file mode 100644 index 610c0542af92..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/CopyFieldCollectionResource.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.POSTable; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.invoke.MethodHandles; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.apache.solr.common.SolrException.ErrorCode; - -/** - * This class responds to requests at /solr/(corename)/schema/copyfields - *

    - * - * To restrict the set of copyFields in the response, specify one or both - * of the following as query parameters, with values as space and/or comma - * separated dynamic or explicit field names: - * - *

      - *
    • dest.fl: include copyFields that have one of these as a destination
    • - *
    • source.fl: include copyFields that have one of these as a source
    • - *
    - * - * If both dest.fl and source.fl are given as query parameters, the copyfields - * in the response will be restricted to those that match any of the destinations - * in dest.fl and also match any of the sources in source.fl. - */ -public class CopyFieldCollectionResource extends BaseFieldResource implements GETable, POSTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final String SOURCE_FIELD_LIST = IndexSchema.SOURCE + "." + CommonParams.FL; - private static final String DESTINATION_FIELD_LIST = IndexSchema.DESTINATION + "." + CommonParams.FL; - - private Set requestedSourceFields; - private Set requestedDestinationFields; - - public CopyFieldCollectionResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - String sourceFieldListParam = getSolrRequest().getParams().get(SOURCE_FIELD_LIST); - if (null != sourceFieldListParam) { - String[] fields = sourceFieldListParam.trim().split("[,\\s]+"); - if (fields.length > 0) { - requestedSourceFields = new HashSet<>(Arrays.asList(fields)); - requestedSourceFields.remove(""); // Remove empty values, if any - } - } - String destinationFieldListParam = getSolrRequest().getParams().get(DESTINATION_FIELD_LIST); - if (null != destinationFieldListParam) { - String[] fields = destinationFieldListParam.trim().split("[,\\s]+"); - if (fields.length > 0) { - requestedDestinationFields = new HashSet<>(Arrays.asList(fields)); - requestedDestinationFields.remove(""); // Remove empty values, if any - } - } - } - } - - @Override - public Representation get() { - try { - getSolrResponse().add(IndexSchema.COPY_FIELDS, - getSchema().getCopyFieldProperties(true, requestedSourceFields, requestedDestinationFields)); - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - @Override - public 
Representation post(Representation entity) throws ResourceException { - try { - if (!getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Object object = ObjectBuilder.fromJSON(entity.getText()); - - if (!(object instanceof List)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected List of the form" - + " (ignore the backslashes): [{\"source\":\"foo\",\"dest\":\"comma-separated list of targets\"}, {...}, ...]"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - List> list = (List>) object; - Map> fieldsToCopy = new HashMap<>(); - ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema(); - Set malformed = new HashSet<>(); - for (Map map : list) { - String fieldName = (String)map.get(IndexSchema.SOURCE); - if (null == fieldName) { - String message = "Missing '" + IndexSchema.SOURCE + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - Object dest = map.get(IndexSchema.DESTINATION); - List destinations = null; - if (dest != null) { - if (dest instanceof List){ - destinations = (List)dest; - } else if (dest instanceof String){ - destinations = Collections.singletonList(dest.toString()); - } else { - String message = "Invalid '" + IndexSchema.DESTINATION + "' type."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - } - if (destinations == null) { - malformed.add(fieldName); - } else { - fieldsToCopy.put(fieldName, destinations); - } - } - if (malformed.size() > 0){ - 
StringBuilder message = new StringBuilder("Malformed destination(s) for: "); - for (String s : malformed) { - message.append(s).append(", "); - } - if (message.length() > 2) { - message.setLength(message.length() - 2);//drop the last , - } - log.error(message.toString().trim()); - throw new SolrException(ErrorCode.BAD_REQUEST, message.toString().trim()); - } - IndexSchema newSchema = null; - boolean success = false; - while (!success) { - try { - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addCopyFields(fieldsToCopy,true); - if (null != newSchema) { - getSolrCore().setLatestSchema(newSchema); - success = true; - } else { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to add fields."); - } - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - log.debug("Schema changed while processing request, retrying"); - oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema(); - } - } - waitForSchemaUpdateToPropagate(newSchema); - } - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - return new SolrOutputRepresentation(); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldCollectionResource.java b/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldCollectionResource.java deleted file mode 100644 index bf9423404b78..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldCollectionResource.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.POSTable; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * This class responds to requests at /solr/(corename)/schema/dynamicfields - *

    - * To restrict the set of dynamic fields in the response, specify a comma - * and/or space separated list of dynamic field patterns in the "fl" query - * parameter. - */ -public class DynamicFieldCollectionResource extends BaseFieldResource implements GETable, POSTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - public DynamicFieldCollectionResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - } - - @Override - public Representation get() { - - try { - List> props = new ArrayList<>(); - if (null == getRequestedFields()) { - for (IndexSchema.DynamicField dynamicField : getSchema().getDynamicFields()) { - if ( ! dynamicField.getRegex().startsWith(IndexSchema.INTERNAL_POLY_FIELD_PREFIX)) { // omit internal polyfields - props.add(getFieldProperties(dynamicField.getPrototype())); - } - } - } else { - if (0 == getRequestedFields().size()) { - String message = "Empty " + CommonParams.FL + " parameter value"; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - Map dynamicFieldsByName = new HashMap<>(); - for (IndexSchema.DynamicField dynamicField : getSchema().getDynamicFields()) { - dynamicFieldsByName.put(dynamicField.getRegex(), dynamicField.getPrototype()); - } - // Use the same order as the fl parameter - for (String dynamicFieldName : getRequestedFields()) { - final SchemaField dynamicSchemaField = dynamicFieldsByName.get(dynamicFieldName); - if (null == dynamicSchemaField) { - log.info("Requested dynamic field '" + dynamicFieldName + "' not found."); - } else { - props.add(getFieldProperties(dynamicSchemaField)); - } - } - } - getSolrResponse().add(IndexSchema.DYNAMIC_FIELDS, props); - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - @Override - public Representation post(Representation entity) { - try { - if ( ! 
getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - if (null == entity.getMediaType()) { - entity.setMediaType(MediaType.APPLICATION_JSON); - } - if ( ! entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Object object = ObjectBuilder.fromJSON(entity.getText()); - if ( ! (object instanceof List)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected List of the form" - + " (ignore the backslashes): [{\"name\":\"*_foo\",\"type\":\"text_general\", ...}, {...}, ...]"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - List> list = (List>)object; - List newDynamicFields = new ArrayList<>(); - List newDynamicFieldArguments = new ArrayList<>(); - ManagedIndexSchema oldSchema = (ManagedIndexSchema)getSchema(); - Map> copyFields = new HashMap<>(); - for (Map map : list) { - String fieldNamePattern = (String)map.remove(IndexSchema.NAME); - if (null == fieldNamePattern) { - String message = "Missing '" + IndexSchema.NAME + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - String fieldType = (String)map.remove(IndexSchema.TYPE); - if (null == fieldType) { - String message = "Missing '" + IndexSchema.TYPE + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - // copyFields:"comma separated list of destination fields" - Object copies = map.get(IndexSchema.COPY_FIELDS); - List copyTo = null; - if (copies != null) { - if (copies instanceof List){ - copyTo = (List)copies; - } else if (copies instanceof String){ - copyTo = 
Collections.singletonList(copies.toString()); - } else { - String message = "Invalid '" + IndexSchema.COPY_FIELDS + "' type."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - } - if (copyTo != null) { - map.remove(IndexSchema.COPY_FIELDS); - copyFields.put(fieldNamePattern, copyTo); - } - newDynamicFields.add(oldSchema.newDynamicField(fieldNamePattern, fieldType, map)); - newDynamicFieldArguments.add(new NewFieldArguments(fieldNamePattern, fieldType, map)); - } - IndexSchema newSchema = null; - boolean firstAttempt = true; - boolean success = false; - while ( ! success) { - try { - if ( ! firstAttempt) { - // If this isn't the first attempt, we must have failed due to - // the schema changing in Zk during optimistic concurrency control. - // In that case, rerun creating the new fields, because they may - // fail now due to changes in the schema. This behavior is consistent - // with what would happen if we locked the schema and the other schema - // change went first. 
- newDynamicFields.clear(); - for (NewFieldArguments args : newDynamicFieldArguments) { - newDynamicFields.add(oldSchema.newDynamicField(args.getName(), args.getType(), args.getMap())); - } - } - firstAttempt = false; - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addDynamicFields(newDynamicFields, copyFields, true); - if (null != newSchema) { - getSolrCore().setLatestSchema(newSchema); - success = true; - } else { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to add dynamic fields."); - } - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - log.debug("Schema changed while processing request, retrying"); - oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema(); - } - } - - waitForSchemaUpdateToPropagate(newSchema); - - } - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldResource.java b/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldResource.java deleted file mode 100644 index bf67608cc496..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/DynamicFieldResource.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.PUTable; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.UnsupportedEncodingException; -import java.lang.invoke.MethodHandles; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; - -/** - * This class responds to requests at /solr/(corename)/schema/dynamicfields/(pattern) - * where pattern is a field name pattern (with an asterisk at the beginning or the end). - */ -public class DynamicFieldResource extends BaseFieldResource implements GETable, PUTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private String fieldNamePattern; - - public DynamicFieldResource() { - super(); - } - - /** - * Gets the field name pattern from the request attribute where it's stored by Restlet. - */ - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - fieldNamePattern = (String)getRequestAttributes().get(IndexSchema.NAME); - try { - fieldNamePattern = null == fieldNamePattern ? 
"" : urlDecode(fieldNamePattern.trim()).trim(); - } catch (UnsupportedEncodingException e) { - throw new ResourceException(e); - } - } - } - - @Override - public Representation get() { - try { - if (fieldNamePattern.isEmpty()) { - final String message = "Dynamic field name is missing"; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - SchemaField field = null; - for (SchemaField prototype : getSchema().getDynamicFieldPrototypes()) { - if (prototype.getName().equals(fieldNamePattern)) { - field = prototype; - break; - } - } - if (null == field) { - final String message = "Dynamic field '" + fieldNamePattern + "' not found."; - throw new SolrException(ErrorCode.NOT_FOUND, message); - } else { - getSolrResponse().add(IndexSchema.DYNAMIC_FIELD, getFieldProperties(field)); - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - /** - * Accepts JSON add dynamic field request - */ - @Override - public Representation put(Representation entity) { - try { - if ( ! getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - if (null == entity.getMediaType()) { - entity.setMediaType(MediaType.APPLICATION_JSON); - } - if ( ! entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Object object = ObjectBuilder.fromJSON(entity.getText()); - if ( ! (object instanceof Map)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected Map of the form" - + " (ignore the backslashes): {\"type\":\"text_general\", ...}, either with or" - + " without a \"name\" mapping. 
If the \"name\" is specified, it must match the" - + " name given in the request URL: /schema/dynamicfields/(name)"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Map map = (Map)object; - if (1 == map.size() && map.containsKey(IndexSchema.DYNAMIC_FIELD)) { - map = (Map)map.get(IndexSchema.DYNAMIC_FIELD); - } - String bodyFieldName; - if (null != (bodyFieldName = (String)map.remove(IndexSchema.NAME)) - && ! fieldNamePattern.equals(bodyFieldName)) { - String message = "Dynamic field name in the request body '" + bodyFieldName - + "' doesn't match dynamic field name in the request URL '" + fieldNamePattern + "'"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - String fieldType; - if (null == (fieldType = (String) map.remove(IndexSchema.TYPE))) { - String message = "Missing '" + IndexSchema.TYPE + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - ManagedIndexSchema oldSchema = (ManagedIndexSchema)getSchema(); - Object copies = map.get(IndexSchema.COPY_FIELDS); - Collection copyFieldNames = null; - if (copies != null) { - if (copies instanceof List) { - copyFieldNames = (List)copies; - } else if (copies instanceof String) { - copyFieldNames = singletonList(copies.toString()); - } else { - String message = "Invalid '" + IndexSchema.COPY_FIELDS + "' type."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - } - if (copyFieldNames != null) { - map.remove(IndexSchema.COPY_FIELDS); - } - IndexSchema newSchema = null; - boolean success = false; - while ( ! 
success) { - try { - SchemaField newDynamicField = oldSchema.newDynamicField(fieldNamePattern, fieldType, map); - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addDynamicFields(singletonList(newDynamicField), singletonMap(newDynamicField.getName(), copyFieldNames), true); - if (null != newSchema) { - getSolrCore().setLatestSchema(newSchema); - success = true; - } else { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to add dynamic field."); - } - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - log.debug("Schema changed while processing request, retrying"); - oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema(); - } - } - // if in cloud mode, wait for schema updates to propagate to all replicas - waitForSchemaUpdateToPropagate(newSchema); - } - } - } - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldCollectionResource.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldCollectionResource.java deleted file mode 100644 index f1bf6a4d3729..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/FieldCollectionResource.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.common.params.CommonParams; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.core.CoreDescriptor; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.POSTable; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * This class responds to requests at /solr/(corename)/schema/fields - *

    - * Two query parameters are supported: - *

      - *
    • - * "fl": a comma- and/or space-separated list of fields to send properties - * for in the response, rather than the default: all of them. - *
    • - *
    • - * "includeDynamic": if the "fl" parameter is specified, matching dynamic - * fields are included in the response and identified with the "dynamicBase" - * property. If the "fl" parameter is not specified, the "includeDynamic" - * query parameter is ignored. - *
    • - *
    - */ -public class FieldCollectionResource extends BaseFieldResource implements GETable, POSTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private boolean includeDynamic; - - public FieldCollectionResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - includeDynamic = getSolrRequest().getParams().getBool(INCLUDE_DYNAMIC_PARAM, false); - } - } - - @Override - public Representation get() { - try { - final List> props = new ArrayList<>(); - if (null == getRequestedFields()) { - SortedSet fieldNames = new TreeSet<>(getSchema().getFields().keySet()); - for (String fieldName : fieldNames) { - props.add(getFieldProperties(getSchema().getFields().get(fieldName))); - } - } else { - if (0 == getRequestedFields().size()) { - String message = "Empty " + CommonParams.FL + " parameter value"; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - // Use the same order as the fl parameter - for (String fieldName : getRequestedFields()) { - final SchemaField field; - if (includeDynamic) { - field = getSchema().getFieldOrNull(fieldName); - } else { - field = getSchema().getFields().get(fieldName); - } - if (null == field) { - log.info("Requested field '" + fieldName + "' not found."); - } else { - props.add(getFieldProperties(field)); - } - } - } - getSolrResponse().add(IndexSchema.FIELDS, props); - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - @Override - public Representation post(Representation entity) { - try { - if (!getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - if (null == entity.getMediaType()) { - entity.setMediaType(MediaType.APPLICATION_JSON); - } - if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, 
true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Object object = ObjectBuilder.fromJSON(entity.getText()); - if (!(object instanceof List)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected List of the form" - + " (ignore the backslashes): [{\"name\":\"foo\",\"type\":\"text_general\", ...}, {...}, ...]"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - List> list = (List>) object; - List newFields = new ArrayList<>(); - List newFieldArguments = new ArrayList<>(); - IndexSchema oldSchema = getSchema(); - Map> copyFields = new HashMap<>(); - for (Map map : list) { - String fieldName = (String) map.remove(IndexSchema.NAME); - if (null == fieldName) { - String message = "Missing '" + IndexSchema.NAME + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - String fieldType = (String) map.remove(IndexSchema.TYPE); - if (null == fieldType) { - String message = "Missing '" + IndexSchema.TYPE + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - // copyFields:"comma separated list of destination fields" - Object copies = map.get(IndexSchema.COPY_FIELDS); - List copyTo = null; - if (copies != null) { - if (copies instanceof List){ - copyTo = (List) copies; - } else if (copies instanceof String){ - copyTo = Collections.singletonList(copies.toString()); - } else { - String message = "Invalid '" + IndexSchema.COPY_FIELDS + "' type."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - } - if (copyTo != null) { - map.remove(IndexSchema.COPY_FIELDS); - copyFields.put(fieldName, copyTo); - } - newFields.add(oldSchema.newField(fieldName, fieldType, map)); 
- newFieldArguments.add(new NewFieldArguments(fieldName, fieldType, map)); - } - IndexSchema newSchema = null; - boolean firstAttempt = true; - boolean success = false; - while (!success) { - try { - if (!firstAttempt) { - // If this isn't the first attempt, we must have failed due to - // the schema changing in Zk during optimistic concurrency control. - // In that case, rerun creating the new fields, because they may - // fail now due to changes in the schema. This behavior is consistent - // with what would happen if we locked the schema and the other schema - // change went first. - newFields.clear(); - for (NewFieldArguments args : newFieldArguments) { - newFields.add(oldSchema.newField( - args.getName(), args.getType(), args.getMap())); - } - } - firstAttempt = false; - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addFields(newFields, copyFields, true); - if (null != newSchema) { - getSolrCore().setLatestSchema(newSchema); - success = true; - } else { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to add fields."); - } - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - log.debug("Schema changed while processing request, retrying"); - oldSchema = getSolrCore().getLatestSchema(); - } - } - waitForSchemaUpdateToPropagate(newSchema); - } - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldResource.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldResource.java deleted file mode 100644 index 2634bbdb759e..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/FieldResource.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.cloud.ZkSolrResourceLoader; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.core.CoreDescriptor; -import org.apache.solr.core.SolrResourceLoader; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.PUTable; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.UnsupportedEncodingException; -import java.lang.invoke.MethodHandles; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; - -/** - * This class responds to requests at /solr/(corename)/schema/fields/(fieldname) - * where "fieldname" is the name of a field. - *

    - * The GET method returns properties for the given fieldname. - * The "includeDynamic" query parameter, if specified, will cause the - * dynamic field matching the given fieldname to be returned if fieldname - * is not explicitly declared in the schema. - *

    - * The PUT method accepts field addition requests in JSON format. - */ -public class FieldResource extends BaseFieldResource implements GETable, PUTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private boolean includeDynamic; - private String fieldName; - - public FieldResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - includeDynamic = getSolrRequest().getParams().getBool(INCLUDE_DYNAMIC_PARAM, false); - fieldName = (String) getRequestAttributes().get(IndexSchema.NAME); - try { - fieldName = null == fieldName ? "" : urlDecode(fieldName.trim()).trim(); - } catch (UnsupportedEncodingException e) { - throw new ResourceException(e); - } - } - } - - @Override - public Representation get() { - try { - if (fieldName.isEmpty()) { - final String message = "Field name is missing"; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - final SchemaField field; - if (includeDynamic) { - field = getSchema().getFieldOrNull(fieldName); - } else { - field = getSchema().getFields().get(fieldName); - } - if (null == field) { - final String message = "Field '" + fieldName + "' not found."; - throw new SolrException(ErrorCode.NOT_FOUND, message); - } else { - getSolrResponse().add(IndexSchema.FIELD, getFieldProperties(field)); - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - /** - * Accepts JSON add field request, to URL - */ - @Override - public Representation put(Representation entity) { - try { - if (!getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - if (null == entity.getMediaType()) { - entity.setMediaType(MediaType.APPLICATION_JSON); - } - if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, 
true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Object object = ObjectBuilder.fromJSON(entity.getText()); - if (!(object instanceof Map)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected Map of the form" - + " (ignore the backslashes): {\"type\":\"text_general\", ...}, either with or" - + " without a \"name\" mapping. If the \"name\" is specified, it must match the" - + " name given in the request URL: /schema/fields/(name)"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - Map map = (Map) object; - if (1 == map.size() && map.containsKey(IndexSchema.FIELD)) { - map = (Map) map.get(IndexSchema.FIELD); - } - String bodyFieldName; - if (null != (bodyFieldName = (String) map.remove(IndexSchema.NAME)) && !fieldName.equals(bodyFieldName)) { - String message = "Field name in the request body '" + bodyFieldName - + "' doesn't match field name in the request URL '" + fieldName + "'"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - String fieldType; - if (null == (fieldType = (String) map.remove(IndexSchema.TYPE))) { - String message = "Missing '" + IndexSchema.TYPE + "' mapping."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema(); - Object copies = map.get(IndexSchema.COPY_FIELDS); - List copyFieldNames = null; - if (copies != null) { - if (copies instanceof List) { - copyFieldNames = (List) copies; - } else if (copies instanceof String) { - copyFieldNames = Collections.singletonList(copies.toString()); - } else { - String message = "Invalid '" + IndexSchema.COPY_FIELDS + "' type."; - log.error(message); - throw new 
SolrException(ErrorCode.BAD_REQUEST, message); - } - } - if (copyFieldNames != null) { - map.remove(IndexSchema.COPY_FIELDS); - } - - IndexSchema newSchema = null; - boolean success = false; - while (!success) { - try { - SchemaField newField = oldSchema.newField(fieldName, fieldType, map); - synchronized (oldSchema.getSchemaUpdateLock()) { - newSchema = oldSchema.addField(newField, copyFieldNames); - if (null != newSchema) { - getSolrCore().setLatestSchema(newSchema); - success = true; - } else { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to add field."); - } - } - } catch (ManagedIndexSchema.SchemaChangedInZkException e) { - log.debug("Schema changed while processing request, retrying"); - oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema(); - } - } - waitForSchemaUpdateToPropagate(newSchema); - } - } - } - } - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeCollectionResource.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeCollectionResource.java deleted file mode 100644 index d2eb1bd812d2..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeCollectionResource.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.common.util.SimpleOrderedMap; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.POSTable; -import org.apache.solr.schema.FieldType; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -/** - * This class responds to requests at /solr/(corename)/schema/fieldtypes - * - * The GET method returns properties for all field types defined in the schema. 
- */ -public class FieldTypeCollectionResource extends BaseFieldTypeResource implements GETable, POSTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private Map> fieldsByFieldType; - private Map> dynamicFieldsByFieldType; - - public FieldTypeCollectionResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - fieldsByFieldType = getFieldsByFieldType(); - dynamicFieldsByFieldType = getDynamicFieldsByFieldType(); - } - } - - @Override - public Representation get() { - try { - List> props = new ArrayList<>(); - Map sortedFieldTypes = new TreeMap<>(getSchema().getFieldTypes()); - for (FieldType fieldType : sortedFieldTypes.values()) { - props.add(getFieldTypeProperties(fieldType)); - } - getSolrResponse().add(IndexSchema.FIELD_TYPES, props); - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - /** Returns field lists from the map constructed in doInit() */ - @Override - protected List getFieldsWithFieldType(FieldType fieldType) { - List fields = fieldsByFieldType.get(fieldType.getTypeName()); - if (null == fields) { - fields = Collections.emptyList(); - } - return fields; - } - - /** Returns dynamic field lists from the map constructed in doInit() */ - @Override - protected List getDynamicFieldsWithFieldType(FieldType fieldType) { - List dynamicFields = dynamicFieldsByFieldType.get(fieldType.getTypeName()); - if (null == dynamicFields) { - dynamicFields = Collections.emptyList(); - } - return dynamicFields; - } - - /** - * Returns a map from field type names to a sorted list of fields that use the field type. - * The map only includes field types that are used by at least one field. 
- */ - private Map> getFieldsByFieldType() { - Map> fieldsByFieldType = new HashMap<>(); - for (SchemaField schemaField : getSchema().getFields().values()) { - final String fieldType = schemaField.getType().getTypeName(); - List fields = fieldsByFieldType.get(fieldType); - if (null == fields) { - fields = new ArrayList<>(); - fieldsByFieldType.put(fieldType, fields); - } - fields.add(schemaField.getName()); - } - for (List fields : fieldsByFieldType.values()) { - Collections.sort(fields); - } - return fieldsByFieldType; - } - - /** - * Returns a map from field type names to a list of dynamic fields that use the field type. - * The map only includes field types that are used by at least one dynamic field. - */ - private Map> getDynamicFieldsByFieldType() { - Map> dynamicFieldsByFieldType = new HashMap<>(); - for (SchemaField schemaField : getSchema().getDynamicFieldPrototypes()) { - final String fieldType = schemaField.getType().getTypeName(); - List dynamicFields = dynamicFieldsByFieldType.get(fieldType); - if (null == dynamicFields) { - dynamicFields = new ArrayList<>(); - dynamicFieldsByFieldType.put(fieldType, dynamicFields); - } - dynamicFields.add(schemaField.getName()); - } - return dynamicFieldsByFieldType; - } - - @SuppressWarnings("unchecked") - @Override - public Representation post(Representation entity) { - try { - if (!getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - if (null == entity.getMediaType()) - entity.setMediaType(MediaType.APPLICATION_JSON); - - if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." 
- + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - Object object = ObjectBuilder.fromJSON(entity.getText()); - if (!(object instanceof List)) { - String message = "Invalid JSON type " + object.getClass().getName() - + ", expected List of field type definitions in the form of" - + " (ignore the backslashes): [{\"name\":\"text_general\",\"class\":\"solr.TextField\", ...}, {...}, ...]"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - List> fieldTypeList = (List>) object; - if (fieldTypeList.size() > 0) - addOrUpdateFieldTypes(fieldTypeList); - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - @SuppressWarnings("unchecked") - protected void addOrUpdateFieldTypes(List> fieldTypeList) throws Exception { - List newFieldTypes = new ArrayList<>(fieldTypeList.size()); - ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema(); - for (Map fieldTypeJson : fieldTypeList) { - if (1 == fieldTypeJson.size() && fieldTypeJson.containsKey(IndexSchema.FIELD_TYPE)) { - fieldTypeJson = (Map) fieldTypeJson.get(IndexSchema.FIELD_TYPE); - } - FieldType newFieldType = - FieldTypeResource.buildFieldTypeFromJson(oldSchema, - (String)fieldTypeJson.get(IndexSchema.NAME), fieldTypeJson); - newFieldTypes.add(newFieldType); - } - // now deploy the added types (all or nothing) - addNewFieldTypes(newFieldTypes, oldSchema); - } -} diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeResource.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeResource.java deleted file mode 100644 index 361c8c216baa..000000000000 --- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeResource.java +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.rest.GETable; -import org.apache.solr.rest.PUTable; -import org.apache.solr.schema.FieldType; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.ManagedIndexSchema; -import org.apache.solr.schema.SchemaField; -import org.noggit.ObjectBuilder; -import org.restlet.data.MediaType; -import org.restlet.representation.Representation; -import org.restlet.resource.ResourceException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.UnsupportedEncodingException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * This class responds to requests at /solr/(corename)/schema/fieldtype/(typename) - * where "typename" is the name of a field type in the schema. - * - * The GET method returns properties for the named field type. 
- */ -public class FieldTypeResource extends BaseFieldTypeResource implements GETable, PUTable { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private String typeName; - - public FieldTypeResource() { - super(); - } - - @Override - public void doInit() throws ResourceException { - super.doInit(); - if (isExisting()) { - typeName = (String)getRequestAttributes().get(IndexSchema.NAME); - try { - typeName = null == typeName ? "" : urlDecode(typeName.trim()).trim(); - } catch (UnsupportedEncodingException e) { - throw new ResourceException(e); - } - } - } - - @Override - public Representation get() { - try { - if (typeName.isEmpty()) { - final String message = "Field type name is missing"; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } else { - FieldType fieldType = getSchema().getFieldTypes().get(typeName); - if (null == fieldType) { - final String message = "Field type '" + typeName + "' not found."; - throw new SolrException(ErrorCode.NOT_FOUND, message); - } - getSolrResponse().add(IndexSchema.FIELD_TYPE, getFieldTypeProperties(fieldType)); - } - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - /** - * Returns a field list using the given field type by iterating over all fields - * defined in the schema. - */ - @Override - protected List getFieldsWithFieldType(FieldType fieldType) { - List fields = new ArrayList<>(); - for (SchemaField schemaField : getSchema().getFields().values()) { - if (schemaField.getType().getTypeName().equals(fieldType.getTypeName())) { - fields.add(schemaField.getName()); - } - } - Collections.sort(fields); - return fields; - } - - /** - * Returns a dynamic field list using the given field type by iterating over all - * dynamic fields defined in the schema. 
- */ - @Override - protected List getDynamicFieldsWithFieldType(FieldType fieldType) { - List dynamicFields = new ArrayList<>(); - for (SchemaField prototype : getSchema().getDynamicFieldPrototypes()) { - if (prototype.getType().getTypeName().equals(fieldType.getTypeName())) { - dynamicFields.add(prototype.getName()); - } - } - return dynamicFields; // Don't sort these - they're matched in order - } - - /** - * Accepts JSON add fieldtype request, to URL - */ - @SuppressWarnings("unchecked") - @Override - public Representation put(Representation entity) { - try { - if (!getSchema().isMutable()) { - final String message = "This IndexSchema is not mutable."; - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - if (null == entity.getMediaType()) - entity.setMediaType(MediaType.APPLICATION_JSON); - - if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) { - String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted." - + " Request has media type " + entity.getMediaType().toString() + "."; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - Object object = ObjectBuilder.fromJSON(entity.getText()); - if (!(object instanceof Map)) { - String message = "Invalid JSON type " + object.getClass().getName() + ", expected Map of the form" - + " (ignore the backslashes): {\"name\":\"text_general\", \"class\":\"solr.TextField\" ...}," - + " either with or without a \"name\" mapping. If the \"name\" is specified, it must match the" - + " name given in the request URL: /schema/fieldtypes/(name)"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - // basic validation passed, let's try to create it! 
- addOrUpdateFieldType((Map)object); - - } catch (Exception e) { - getSolrResponse().setException(e); - } - handlePostExecution(log); - - return new SolrOutputRepresentation(); - } - - protected void addOrUpdateFieldType(Map fieldTypeJson) { - ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema(); - FieldType newFieldType = buildFieldTypeFromJson(oldSchema, typeName, fieldTypeJson); - addNewFieldTypes(Collections.singletonList(newFieldType), oldSchema); - } - - /** - * Builds a FieldType definition from a JSON object. - */ - @SuppressWarnings("unchecked") - static FieldType buildFieldTypeFromJson(ManagedIndexSchema oldSchema, String fieldTypeName, Map fieldTypeJson) { - if (1 == fieldTypeJson.size() && fieldTypeJson.containsKey(IndexSchema.FIELD_TYPE)) { - fieldTypeJson = (Map)fieldTypeJson.get(IndexSchema.FIELD_TYPE); - } - - String bodyTypeName = (String) fieldTypeJson.get(IndexSchema.NAME); - if (bodyTypeName == null) { - // must provide the name in the JSON for converting to the XML format needed - // to create FieldType objects using the FieldTypePluginLoader - fieldTypeJson.put(IndexSchema.NAME, fieldTypeName); - } else { - // if they provide it in the JSON, then it must match the value from the path - if (!fieldTypeName.equals(bodyTypeName)) { - String message = "Field type name in the request body '" + bodyTypeName - + "' doesn't match field type name in the request URL '" + fieldTypeName + "'"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - } - - String className = (String)fieldTypeJson.get(FieldType.CLASS_NAME); - if (className == null) { - String message = "Missing required '" + FieldType.CLASS_NAME + "' property!"; - log.error(message); - throw new SolrException(ErrorCode.BAD_REQUEST, message); - } - - return oldSchema.newFieldType(fieldTypeName, className, fieldTypeJson); - } -} diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java 
b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java index acc8c1302714..4319c3ebba33 100644 --- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java @@ -39,6 +39,7 @@ import org.apache.lucene.util.Version; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; @@ -1352,6 +1353,10 @@ public boolean isCopyFieldTarget( SchemaField f ) { * Get a map of property name -> value for the whole schema. */ public SimpleOrderedMap getNamedPropertyValues() { + return getNamedPropertyValues(new MapSolrParams(Collections.EMPTY_MAP)); + + } + public SimpleOrderedMap getNamedPropertyValues(SolrParams params) { SimpleOrderedMap topLevel = new SimpleOrderedMap<>(); topLevel.add(NAME, getSchemaName()); topLevel.add(VERSION, getVersion()); @@ -1372,19 +1377,19 @@ public SimpleOrderedMap getNamedPropertyValues() { List> fieldTypeProperties = new ArrayList<>(); SortedMap sortedFieldTypes = new TreeMap<>(fieldTypes); for (FieldType fieldType : sortedFieldTypes.values()) { - fieldTypeProperties.add(fieldType.getNamedPropertyValues(false)); + fieldTypeProperties.add(fieldType.getNamedPropertyValues(params.getBool("showDefaults", false))); } topLevel.add(FIELD_TYPES, fieldTypeProperties); List> fieldProperties = new ArrayList<>(); SortedSet fieldNames = new TreeSet<>(fields.keySet()); for (String fieldName : fieldNames) { - fieldProperties.add(fields.get(fieldName).getNamedPropertyValues(false)); + fieldProperties.add(fields.get(fieldName).getNamedPropertyValues(params.getBool("showDefaults", false))); } topLevel.add(FIELDS, fieldProperties); List> dynamicFieldProperties = new ArrayList<>(); for (IndexSchema.DynamicField dynamicField : dynamicFields) { if ( ! 
dynamicField.getRegex().startsWith(INTERNAL_POLY_FIELD_PREFIX)) { // omit internal polyfields - dynamicFieldProperties.add(dynamicField.getPrototype().getNamedPropertyValues(false)); + dynamicFieldProperties.add(dynamicField.getPrototype().getNamedPropertyValues(params.getBool("showDefaults", false))); } } topLevel.add(DYNAMIC_FIELDS, dynamicFieldProperties); diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index d87eb690a110..f291b2f97862 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -35,10 +35,12 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; +import com.google.common.collect.ImmutableSet; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.http.Header; @@ -84,6 +86,7 @@ import org.apache.solr.response.QueryResponseWriter; import org.apache.solr.response.QueryResponseWriterUtil; import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.schema.IndexSchema; import org.apache.solr.security.AuthenticationPlugin; import org.apache.solr.security.AuthorizationContext; import org.apache.solr.security.AuthorizationContext.CollectionRequest; diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestClassNameShortening.java b/solr/core/src/test/org/apache/solr/rest/schema/TestClassNameShortening.java index ddbd331ee876..52ba10b789fe 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestClassNameShortening.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestClassNameShortening.java @@ -18,13 +18,14 @@ import org.apache.solr.util.RestTestBase; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; import 
org.restlet.ext.servlet.ServerServlet; import java.util.SortedMap; import java.util.TreeMap; - +@Ignore public class TestClassNameShortening extends RestTestBase { @BeforeClass diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java index 5eeee8c7a334..c0f936d929e5 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java @@ -30,73 +30,51 @@ public void testGetAllCopyFields() throws Exception { +" and int[@name='maxChars'][.='200']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" +" and str[@name='dest'][.='title']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" +" and str[@name='dest'][.='*_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='*_dest_sub_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + +" and str[@name='dest'][.='*_dest_sub_s']]", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + +" and str[@name='dest'][.='dest_sub_no_ast_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" +" and str[@name='dest'][.='title']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and 
str[@name='sourceDynamicBase'][.='*_i']" +" and str[@name='dest'][.='*_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" - +" and str[@name='dest'][.='*_dest_sub_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + +" and str[@name='dest'][.='*_dest_sub_s']]", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + +" and str[@name='dest'][.='dest_sub_no_ast_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" +" and str[@name='dest'][.='*_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" - +" and str[@name='dest'][.='*_dest_sub_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + +" and str[@name='dest'][.='*_dest_sub_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='sourceDynamicBase'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']" - +" and str[@name='destDynamicBase'][.='*_s']]", + +" and str[@name='dest'][.='dest_sub_no_ast_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and arr[@name='sourceExplicitFields']/str[.='title_stemmed']" - +" and arr[@name='sourceExplicitFields']/str[.='title_lettertok']" +" and str[@name='dest'][.='text']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and arr[@name='sourceExplicitFields']/str[.='title_stemmed']" - +" and arr[@name='sourceExplicitFields']/str[.='title_lettertok']" +" and str[@name='dest'][.='*_s']]", "/response/arr[@name='copyFields']/lst[ 
str[@name='source'][.='title_*']" - +" and arr[@name='sourceExplicitFields']/str[.='title_stemmed']" - +" and arr[@name='sourceExplicitFields']/str[.='title_lettertok']" +" and str[@name='dest'][.='*_dest_sub_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and arr[@name='sourceExplicitFields']/str[.='title_stemmed']" - +" and arr[@name='sourceExplicitFields']/str[.='title_lettertok']" +" and str[@name='dest'][.='dest_sub_no_ast_s']]"); } @@ -104,56 +82,22 @@ public void testGetAllCopyFields() throws Exception { public void testJsonGetAllCopyFields() throws Exception { assertJQ("/schema/copyfields?indent=on&wt=json", "/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}", - "/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}", + "/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}", "/copyFields/[8]=={'source':'*_i','dest':'title'}", "/copyFields/[9]=={'source':'*_i','dest':'*_s'}", - "/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}", - "/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}", + "/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}", + "/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}", - "/copyFields/[12]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'title'}", - "/copyFields/[13]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_s'}", - "/copyFields/[14]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}", - "/copyFields/[15]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}", + "/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}", + "/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}", + "/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}", + "/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}", - 
"/copyFields/[16]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_s'}", - "/copyFields/[17]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}", - "/copyFields/[18]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}"); + "/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}", + "/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}", + "/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}"); } - @Test - public void testRestrictSource() throws Exception { - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=title,*_i,*_src_sub_i,src_sub_no_ast_i", - "count(/response/arr[@name='copyFields']/lst) = 16", // 4 + 4 + 4 + 4 - "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='title']) = 4", - "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='*_i']) = 4", - "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='*_src_sub_i']) = 4", - "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='src_sub_no_ast_i']) = 4"); - } - - @Test - public void testRestrictDest() throws Exception { - assertQ("/schema/copyfields/?indent=on&wt=xml&dest.fl=title,*_s,*_dest_sub_s,dest_sub_no_ast_s", - "count(/response/arr[@name='copyFields']/lst) = 16", // 3 + 4 + 4 + 5 - "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='title']) = 3", - "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='*_s']) = 4", - "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='*_dest_sub_s']) = 4", - "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='dest_sub_no_ast_s']) = 5"); - } - - @Test - public void testRestrictSourceAndDest() throws Exception { - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=title,*_i&dest.fl=title,dest_sub_no_ast_s", - "count(/response/arr[@name='copyFields']/lst) = 3", - - 
"/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='title']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]"); - } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java index 318b28a78ad9..032bbad08ccd 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java @@ -29,21 +29,6 @@ public void testGetAllDynamicFields() throws Exception { "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[3] = '*_mfacet'"); } - @Test - public void testGetTwoDynamicFields() throws IOException { - assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_i,*_s", - "count(/response/arr[@name='dynamicFields']/lst/str[@name='name']) = 2", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_i'", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = '*_s'"); - } - - @Test - public void testNotFoundDynamicFields() throws IOException { - assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_not_in_there,this_one_isnt_either_*", - "count(/response/arr[@name='dynamicFields']) = 1", - "count(/response/arr[@name='dynamicfields']/lst/str[@name='name']) = 0"); - } - @Test public void testJsonGetAllDynamicFields() throws Exception { assertJQ("/schema/dynamicfields?indent=on", @@ -51,18 +36,4 @@ public void testJsonGetAllDynamicFields() throws Exception { "/dynamicFields/[1]/name=='ignored_*'", "/dynamicFields/[2]/name=='*_mfacet'"); } - - @Test - public void testJsonGetTwoDynamicFields() throws Exception { - 
assertJQ("/schema/dynamicfields?indent=on&fl=*_i,*_s&wt=xml", // assertJQ will fix the wt param to be json - "/dynamicFields/[0]/name=='*_i'", - "/dynamicFields/[1]/name=='*_s'"); - } - - @Test - public void testJsonPostFieldsToNonMutableIndexSchema() throws Exception { - assertJPost("/schema/dynamicfields", - "[{\"name\":\"foobarbaz\", \"type\":\"text_general\", \"stored\":\"false\"}]", - "/error/msg=='This IndexSchema is not mutable.'"); - } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java index 54b17fcdd9fc..7ca7953d393d 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java @@ -67,11 +67,4 @@ public void testJsonGetDynamicField() throws Exception { "/dynamicField/required==false", "/dynamicField/tokenized==false"); } - - @Test - public void testJsonPutFieldToNonMutableIndexSchema() throws Exception { - assertJPut("/schema/dynamicfields/newfield_*", - "{\"type\":\"text_general\", \"stored\":\"false\"}", - "/error/msg=='This IndexSchema is not mutable.'"); - } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java index 571acc52bffc..dd554158eab6 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java @@ -30,45 +30,6 @@ public void testGetAllFields() throws Exception { "(/response/arr[@name='fields']/lst/str[@name='name'])[3] = '_version_'"); } - @Test - public void testGetTwoFields() throws IOException { - assertQ("/schema/fields?indent=on&wt=xml&fl=id,_version_", - "count(/response/arr[@name='fields']/lst/str[@name='name']) = 2", - 
"(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'"); - } - - @Test - public void testGetThreeFieldsDontIncludeDynamic() throws IOException { - // - assertQ("/schema/fields?indent=on&wt=xml&fl=id,_version_,price_i", - "count(/response/arr[@name='fields']/lst/str[@name='name']) = 2", - "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'"); - } - - @Test - public void testGetThreeFieldsIncludeDynamic() throws IOException { - assertQ("/schema/fields?indent=on&wt=xml&fl=id,_version_,price_i&includeDynamic=on", - - "count(/response/arr[@name='fields']/lst/str[@name='name']) = 3", - - "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'", - - "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'", - - "(/response/arr[@name='fields']/lst/str[@name='name'])[3] = 'price_i'", - - "/response/arr[@name='fields']/lst[ str[@name='name']='price_i' " - +" and str[@name='dynamicBase']='*_i']"); - } - - @Test - public void testNotFoundFields() throws IOException { - assertQ("/schema/fields?indent=on&wt=xml&fl=not_in_there,this_one_either", - "count(/response/arr[@name='fields']) = 1", - "count(/response/arr[@name='fields']/lst/str[@name='name']) = 0"); - } @Test public void testJsonGetAllFields() throws Exception { @@ -78,10 +39,4 @@ public void testJsonGetAllFields() throws Exception { "/fields/[2]/name=='_version_'"); } - @Test - public void testJsonGetTwoFields() throws Exception { - assertJQ("/schema/fields?indent=on&fl=id,_version_&wt=xml", // assertJQ should fix the wt param to be json - "/fields/[0]/name=='id'", - "/fields/[1]/name=='_version_'"); - } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java index 711e3c05face..627aee09e780 100644 --- 
a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java @@ -72,18 +72,12 @@ public void testJsonGetField() throws Exception { "/field/tokenized==true"); } - @Test - public void testGetFieldIncludeDynamic() throws Exception { - assertQ("/schema/fields/some_crazy_name_i?indent=on&wt=xml&includeDynamic=true", - "/response/lst[@name='field']/str[@name='name'] = 'some_crazy_name_i'", - "/response/lst[@name='field']/str[@name='dynamicBase'] = '*_i'"); - } - + @Test public void testGetFieldDontShowDefaults() throws Exception { String[] tests = { "count(/response/lst[@name='field']) = 1", - "count(/response/lst[@name='field']/*) = 7", + "count(/response/lst[@name='field']/*) = 6", "/response/lst[@name='field']/str[@name='name'] = 'id'", "/response/lst[@name='field']/str[@name='type'] = 'string'", "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", @@ -95,17 +89,4 @@ public void testGetFieldDontShowDefaults() throws Exception { assertQ("/schema/fields/id?indent=on&wt=xml&showDefaults=false", tests); } - @Test - public void testJsonPutFieldToNonMutableIndexSchema() throws Exception { - assertJPut("/schema/fields/newfield", - "{\"type\":\"text_general\", \"stored\":\"false\"}", - "/error/msg=='This IndexSchema is not mutable.'"); - } - - @Test - public void testJsonPostFieldsToNonMutableIndexSchema() throws Exception { - assertJPost("/schema/fields", - "[{\"name\":\"foobarbaz\", \"type\":\"text_general\", \"stored\":\"false\"}]", - "/error/msg=='This IndexSchema is not mutable.'"); - } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java index a61f6444dccd..53cd1c0c8604 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java +++ 
b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java @@ -19,6 +19,7 @@ import org.junit.Test; public class TestFieldTypeCollectionResource extends SolrRestletTestBase { + @Test public void testGetAllFieldTypes() throws Exception { assertQ("/schema/fieldtypes?indent=on&wt=xml", diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java index 2f0c2410365b..eb72aeda63e6 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java @@ -24,7 +24,7 @@ public class TestFieldTypeResource extends SolrRestletTestBase { public void testGetFieldType() throws Exception { assertQ("/schema/fieldtypes/float?indent=on&wt=xml&showDefaults=true", "count(/response/lst[@name='fieldType']) = 1", - "count(/response/lst[@name='fieldType']/*) = 18", + "count(/response/lst[@name='fieldType']/*) = 16", "/response/lst[@name='fieldType']/str[@name='name'] = 'float'", "/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TrieFloatField'", "/response/lst[@name='fieldType']/str[@name='precisionStep'] ='0'", @@ -39,9 +39,7 @@ public void testGetFieldType() throws Exception { "/response/lst[@name='fieldType']/bool[@name='omitPositions'] = 'false'", "/response/lst[@name='fieldType']/bool[@name='storeOffsetsWithPositions'] = 'false'", "/response/lst[@name='fieldType']/bool[@name='multiValued'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='tokenized'] = 'false'", - "/response/lst[@name='fieldType']/arr[@name='fields']/str = 'weight'", - "/response/lst[@name='fieldType']/arr[@name='dynamicFields']/str = '*_f'"); + "/response/lst[@name='fieldType']/bool[@name='tokenized'] = 'false'"); } @Test @@ -69,22 +67,19 @@ public void testJsonGetFieldType() throws Exception { "/fieldType/omitPositions==false", "/fieldType/storeOffsetsWithPositions==false", 
"/fieldType/multiValued==false", - "/fieldType/tokenized==false", - "/fieldType/fields==['weight']", - "/fieldType/dynamicFields==['*_f']"); + "/fieldType/tokenized==false"); } @Test public void testGetFieldTypeDontShowDefaults() throws Exception { assertQ("/schema/fieldtypes/teststop?wt=xml&indent=on", - "count(/response/lst[@name='fieldType']/*) = 5", + "count(/response/lst[@name='fieldType']/*) = 3", "/response/lst[@name='fieldType']/str[@name='name'] = 'teststop'", "/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TextField'", "/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.LowerCaseTokenizerFactory'", "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StandardFilterFactory']", "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StopFilterFactory']", - "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='words'][.='stopwords.txt']", - "/response/lst[@name='fieldType']/arr[@name='fields']/str[.='teststop']", - "/response/lst[@name='fieldType']/arr[@name='dynamicFields']"); + "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='words'][.='stopwords.txt']" + ); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java deleted file mode 100644 index 65727099eabf..000000000000 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java +++ /dev/null @@ -1,366 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.commons.io.FileUtils; -import org.apache.solr.util.RestTestBase; -import org.eclipse.jetty.servlet.ServletHolder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.restlet.ext.servlet.ServerServlet; - -import java.io.File; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.regex.Pattern; - -public class TestManagedSchemaDynamicFieldResource extends RestTestBase { - - private static File tmpSolrHome; - private static File tmpConfDir; - - private static final String collection = "collection1"; - private static final String confDir = collection + "/conf"; - - - @Before - public void before() throws Exception { - tmpSolrHome = createTempDir().toFile(); - tmpConfDir = new File(tmpSolrHome, confDir); - FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - - final SortedMap extraServlets = new TreeMap<>(); - final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class); - solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi"); - extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...' 
- - System.setProperty("managed.schema.mutable", "true"); - System.setProperty("enable.update.log", "false"); - - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); - } - - @After - public void after() throws Exception { - if (jetty != null) { - jetty.stop(); - jetty = null; - } - client = null; - if (restTestHarness != null) { - restTestHarness.close(); - } - restTestHarness = null; - } - - @Test - public void testAddDynamicFieldBadFieldType() throws Exception { - assertJPut("/schema/dynamicfields/*_newdynamicfield", - json( "{'type':'not_in_there_at_all','stored':false}" ), - "/error/msg==\"Dynamic field \\'*_newdynamicfield\\': Field type \\'not_in_there_at_all\\' not found.\""); - } - - @Test - public void testAddDynamicFieldMismatchedName() throws Exception { - assertJPut("/schema/dynamicfields/*_newdynamicfield", - json( "{'name':'*_something_else','type':'text','stored':false}" ), - "/error/msg=='///regex:\\\\*_newdynamicfield///'"); - } - - @Test - public void testAddDynamicFieldBadProperty() throws Exception { - assertJPut("/schema/dynamicfields/*_newdynamicfield", - json( "{'type':'text','no_property_with_this_name':false}" ), - "/error/msg==\"java.lang.IllegalArgumentException: Invalid field property: no_property_with_this_name\""); - } - - @Test - public void testAddDynamicField() throws Exception { - assertQ("/schema/dynamicfields/newdynamicfield_*?indent=on&wt=xml", - "count(/response/lst[@name='newdynamicfield_*']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/dynamicfields/newdynamicfield_*", - json("{'type':'text','stored':false}"), - "/responseHeader/status==0"); - - assertQ("/schema/dynamicfields/newdynamicfield_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = 
'0'"); - - assertU(adoc("newdynamicfield_A", "value1 value2", "id", "123")); - assertU(commit()); - - assertQ("/select?q=newdynamicfield_A:value1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='123']"); - } - - @Test - public void testAddDynamicFieldWithMulipleOptions() throws Exception { - assertQ("/schema/dynamicfields/newdynamicfield_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/dynamicfields/newdynamicfield_*", - json("{'type':'text_en','stored':true,'indexed':false}"), - "/responseHeader/status==0"); - - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - assertTrue(managedSchemaFile.exists()); - String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newdynamicfieldStoredTrueIndexedFalsePattern - = Pattern.compile( ""); - assertTrue(newdynamicfieldStoredTrueIndexedFalsePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/dynamicfields/newdynamicfield_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/lst[@name='dynamicField']/str[@name='name'] = 'newdynamicfield_*'", - "/response/lst[@name='dynamicField']/str[@name='type'] = 'text_en'", - "/response/lst[@name='dynamicField']/bool[@name='indexed'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='stored'] = 'true'"); - - assertU(adoc("newdynamicfield_A", "value1 value2", "id", "1234")); - assertU(commit()); - - assertQ("/schema/dynamicfields/newdynamicfield2_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 0", - 
"/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/dynamicfields/newdynamicfield2_*", - json("{'type':'text_en','stored':true,'indexed':true,'multiValued':true}"), - "/responseHeader/status==0"); - - managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newdynamicfield2StoredTrueIndexedTrueMultiValuedTruePattern - = Pattern.compile( ""); - assertTrue(newdynamicfield2StoredTrueIndexedTrueMultiValuedTruePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/dynamicfields/newdynamicfield2_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/lst[@name='dynamicField']/str[@name='name'] = 'newdynamicfield2_*'", - "/response/lst[@name='dynamicField']/str[@name='type'] = 'text_en'", - "/response/lst[@name='dynamicField']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='stored'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='multiValued'] = 'true'"); - - assertU(adoc("newdynamicfield2_A", "value1 value2", "newdynamicfield2_A", "value3 value4", "id", "5678")); - assertU(commit()); - - assertQ("/select?q=newdynamicfield2_A:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='5678']"); - } - - @Test - public void testAddDynamicFieldCollectionWithMultipleOptions() throws Exception { - assertQ("/schema/dynamicfields?indent=on&wt=xml", - "count(/response/arr[@name='dynamicFields']/lst/str[@name]) > 0", // there are fields - "count(/response/arr[@name='dynamicFields']/lst/str[starts-with(@name,'newfield')]) = 0"); // but none named newfield* - - assertJPost("/schema/dynamicfields", - 
json("[{'name':'newdynamicfield_*','type':'text_en','stored':true,'indexed':false}]"), - "/responseHeader/status==0"); - - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - assertTrue(managedSchemaFile.exists()); - String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newfieldStoredTrueIndexedFalsePattern - = Pattern.compile( ""); - assertTrue(newfieldStoredTrueIndexedFalsePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/dynamicfields?indent=on&wt=xml", - "/response/arr[@name='dynamicFields']/lst" - + "[str[@name='name']='newdynamicfield_*' and str[@name='type']='text_en'" - + " and bool[@name='stored']='true' and bool[@name='indexed']='false']"); - - assertU(adoc("newdynamicfield_A", "value1 value2", "id", "789")); - assertU(commit()); - - assertJPost("/schema/dynamicfields", - json("[{'name':'newdynamicfield2_*','type':'text_en','stored':true,'indexed':true,'multiValued':true}]"), - "/responseHeader/status==0"); - - managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newdynamicfield2StoredTrueIndexedTrueMultiValuedTruePattern - = Pattern.compile( ""); - assertTrue(newdynamicfield2StoredTrueIndexedTrueMultiValuedTruePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/dynamicfields?indent=on&wt=xml", - "/response/arr[@name='dynamicFields']/lst" - + "[str[@name='name']='newdynamicfield2_*' and str[@name='type']='text_en'" - + " and bool[@name='stored']='true' and bool[@name='indexed']='true' and bool[@name='multiValued']='true']"); - - assertU(adoc("newdynamicfield2_A", "value1 value2", "newdynamicfield2_A", "value3 value4", "id", "790")); - assertU(commit()); - - assertQ("/select?q=newdynamicfield2_A:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc) = 1", - 
"/response/result[@name='response']/doc/str[@name='id'][.='790']"); - } - - - @Test - public void testAddCopyField() throws Exception { - assertQ("/schema/dynamicfields/newdynamicfield2_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/dynamicfields/dynamicfieldA_*", - json("{'type':'text','stored':false}"), - "/responseHeader/status==0"); - assertJPut("/schema/dynamicfields/dynamicfieldB_*", - json("{'type':'text','stored':false, 'copyFields':['dynamicfieldA_*']}"), - "/responseHeader/status==0"); - assertJPut("/schema/dynamicfields/dynamicfieldC_*", - json("{'type':'text','stored':false, 'copyFields':'dynamicfieldA_*'}"), - "/responseHeader/status==0"); - - assertQ("/schema/dynamicfields/dynamicfieldB_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldB_*", - "count(/response/arr[@name='copyFields']/lst) = 1"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldC_*", - "count(/response/arr[@name='copyFields']/lst) = 1"); - //fine to pass in empty list, just won't do anything - assertJPut("/schema/dynamicfields/dynamicfieldD_*", - json("{'type':'text','stored':false, 'copyFields':[]}"), - "/responseHeader/status==0"); - //some bad usages - assertJPut("/schema/dynamicfields/dynamicfieldF_*", - json("{'type':'text','stored':false, 'copyFields':['some_nonexistent_dynamicfield_ignore_exception_*']}"), - "/error/msg==\"copyField dest :\\'some_nonexistent_dynamicfield_ignore_exception_*\\' is not an explicit field and doesn\\'t match a dynamicField.\""); - } - - @Test - public void testPostMultipleDynamicFields() throws Exception { - assertQ("/schema/dynamicfields/newdynamicfield1_*?indent=on&wt=xml", - 
"count(/response/lst[@name='dynamicField']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertQ("/schema/dynamicfields/newdynamicfield2_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPost("/schema/dynamicfields", - json( "[{'name':'newdynamicfield1_*','type':'text','stored':false}," - + " {'name':'newdynamicfield2_*','type':'text','stored':false}]"), - "/responseHeader/status==0"); - - assertQ("/schema/dynamicfields/newdynamicfield1_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - assertQ("/schema/dynamicfields/newdynamicfield2_*?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - assertU(adoc("newdynamicfield1_A", "value1 value2", "id", "123")); - assertU(adoc("newdynamicfield2_A", "value3 value4", "id", "456")); - assertU(commit()); - - assertQ("/select?q=newdynamicfield1_A:value1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='123']"); - assertQ("/select?q=newdynamicfield2_A:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='456']"); - } - - @Test - public void testPostCopy() throws Exception { - assertJPost("/schema/dynamicfields", - json( "[{'name':'dynamicfieldA_*','type':'text','stored':false}," - + " 
{'name':'dynamicfieldB_*','type':'text','stored':false}," - + " {'name':'dynamicfieldC_*','type':'text','stored':false, 'copyFields':['dynamicfieldB_*']}]"), - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldC_*", - "count(/response/arr[@name='copyFields']/lst) = 1"); - assertJPost("/schema/dynamicfields", - json( "[{'name':'dynamicfieldD_*','type':'text','stored':false}," - + " {'name':'dynamicfieldE_*','type':'text','stored':false}," - + " {'name':'dynamicfieldF_*','type':'text','stored':false, 'copyFields':['dynamicfieldD_*','dynamicfieldE_*']}," - + " {'name':'dynamicfieldG_*','type':'text','stored':false, 'copyFields':'dynamicfieldD_*'}]"),//single - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldF_*", - "count(/response/arr[@name='copyFields']/lst) = 2"); - //passing in an empty list is perfectly acceptable, it just won't do anything - assertJPost("/schema/dynamicfields", - json( "[{'name':'dynamicfieldX_*','type':'text','stored':false}," - + " {'name':'dynamicfieldY_*','type':'text','stored':false}," - + " {'name':'dynamicfieldZ_*','type':'text','stored':false, 'copyFields':[]}]"), - "/responseHeader/status==0"); - //some bad usages - - assertJPost("/schema/dynamicfields", - json( "[{'name':'dynamicfieldH_*','type':'text','stored':false}," - + " {'name':'dynamicfieldI_*','type':'text','stored':false}," - + " {'name':'dynamicfieldJ_*','type':'text','stored':false, 'copyFields':['some_nonexistent_dynamicfield_ignore_exception_*']}]"), - "/error/msg=='copyField dest :\\'some_nonexistent_dynamicfield_ignore_exception_*\\' is not an explicit field and doesn\\'t match a dynamicField.'"); - } - - @Test - public void testPostCopyFields() throws Exception { - assertJPost("/schema/dynamicfields", - json( "[{'name':'dynamicfieldA_*','type':'text','stored':false}," - + " {'name':'dynamicfieldB_*','type':'text','stored':false}," - + " 
{'name':'dynamicfieldC_*','type':'text','stored':false}," - + " {'name':'dynamicfieldD_*','type':'text','stored':false}," - + " {'name':'dynamicfieldE_*','type':'text','stored':false}]"), - "/responseHeader/status==0"); - assertJPost("/schema/copyfields", - json( "[{'source':'dynamicfieldA_*', 'dest':'dynamicfieldB_*'}," - + " {'source':'dynamicfieldD_*', 'dest':['dynamicfieldC_*', 'dynamicfieldE_*']}]"), - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldA_*", - "count(/response/arr[@name='copyFields']/lst) = 1"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=dynamicfieldD_*", - "count(/response/arr[@name='copyFields']/lst) = 2"); - assertJPost("/schema/copyfields", // copyField glob sources are not required to match a dynamic field - json("[{'source':'some_glob_not_necessarily_matching_any_dynamicfield_*', 'dest':['dynamicfieldA_*']}," - +" {'source':'*', 'dest':['dynamicfieldD_*']}]"), - "/responseHeader/status==0"); - assertJPost("/schema/copyfields", - json("[{'source':'dynamicfieldD_*', 'dest':['some_nonexistent_dynamicfield_ignore_exception_*']}]"), - "/error/msg=='copyField dest :\\'some_nonexistent_dynamicfield_ignore_exception_*\\' is not an explicit field and doesn\\'t match a dynamicField.'"); - } -} - diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java deleted file mode 100644 index b39d266f2238..000000000000 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java +++ /dev/null @@ -1,369 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.rest.schema; -import org.apache.commons.io.FileUtils; -import org.apache.solr.util.RestTestBase; -import org.eclipse.jetty.servlet.ServletHolder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.restlet.ext.servlet.ServerServlet; - -import java.io.File; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.regex.Pattern; - -public class TestManagedSchemaFieldResource extends RestTestBase { - - private static File tmpSolrHome; - private static File tmpConfDir; - - private static final String collection = "collection1"; - private static final String confDir = collection + "/conf"; - - - @Before - public void before() throws Exception { - tmpSolrHome = createTempDir().toFile(); - tmpConfDir = new File(tmpSolrHome, confDir); - FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - - final SortedMap extraServlets = new TreeMap<>(); - final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class); - solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi"); - extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...' 
- - System.setProperty("managed.schema.mutable", "true"); - System.setProperty("enable.update.log", "false"); - - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); - } - - @After - public void after() throws Exception { - if (jetty != null) { - jetty.stop(); - jetty = null; - } - client = null; - if (restTestHarness != null) { - restTestHarness.close(); - } - restTestHarness = null; - } - - @Test - public void testAddFieldBadFieldType() throws Exception { - assertJPut("/schema/fields/newfield", - json( "{'type':'not_in_there_at_all','stored':false}" ), - "/error/msg==\"Field \\'newfield\\': Field type \\'not_in_there_at_all\\' not found.\""); - } - - @Test - public void testAddFieldMismatchedName() throws Exception { - assertJPut("/schema/fields/newfield", - json( "{'name':'something_else','type':'text','stored':false}" ), - "/error/msg=='///regex:newfield///'"); - } - - @Test - public void testAddFieldBadProperty() throws Exception { - assertJPut("/schema/fields/newfield", - json( "{'type':'text','no_property_with_this_name':false}" ), - "/error/msg==\"java.lang.IllegalArgumentException: Invalid field property: no_property_with_this_name\""); - } - - @Test - public void testAddField() throws Exception { - assertQ("/schema/fields/newfield?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/fields/newfield", - json("{'type':'text','stored':false}"), - "/responseHeader/status==0"); - - assertQ("/schema/fields/newfield?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - assertU(adoc("newfield", "value1 value2", "id", "123")); - assertU(commit()); - - assertQ("/select?q=newfield:value1", - 
"/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='123']"); - } - - @Test - public void testAddFieldWithMulipleOptions() throws Exception { - assertQ("/schema/fields/newfield?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/fields/newfield", - json("{'type':'text_en','stored':true,'indexed':false}"), - "/responseHeader/status==0"); - - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - assertTrue(managedSchemaFile.exists()); - String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newfieldStoredTrueIndexedFalsePattern - = Pattern.compile( ""); - assertTrue(newfieldStoredTrueIndexedFalsePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/fields/newfield?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/lst[@name='field']/str[@name='name'] = 'newfield'", - "/response/lst[@name='field']/str[@name='type'] = 'text_en'", - "/response/lst[@name='field']/bool[@name='indexed'] = 'false'", - "/response/lst[@name='field']/bool[@name='stored'] = 'true'"); - - assertU(adoc("newfield", "value1 value2", "id", "1234")); - assertU(commit()); - - assertQ("/schema/fields/newfield2?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/fields/newfield2", - json("{'type':'text_en','stored':true,'indexed':true,'multiValued':true}"), - "/responseHeader/status==0"); - - managedSchemaContents = 
FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newfield2StoredTrueIndexedTrueMultiValuedTruePattern - = Pattern.compile( ""); - assertTrue(newfield2StoredTrueIndexedTrueMultiValuedTruePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/fields/newfield2?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/lst[@name='field']/str[@name='name'] = 'newfield2'", - "/response/lst[@name='field']/str[@name='type'] = 'text_en'", - "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='field']/bool[@name='stored'] = 'true'", - "/response/lst[@name='field']/bool[@name='multiValued'] = 'true'"); - - assertU(adoc("newfield2", "value1 value2", "newfield2", "value3 value4", "id", "5678")); - assertU(commit()); - - assertQ("/select?q=newfield2:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='5678']"); - } - - @Test - public void testAddFieldCollectionWithMultipleOptions() throws Exception { - assertQ("/schema/fields?indent=on&wt=xml", - "count(/response/arr[@name='fields']/lst/str[@name]) > 0", // there are fields - "count(/response/arr[@name='fields']/lst/str[starts-with(@name,'newfield')]) = 0"); // but none named newfield* - - assertJPost("/schema/fields", - json("[{'name':'newfield','type':'text_en','stored':true,'indexed':false}]"), - "/responseHeader/status==0"); - - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - assertTrue(managedSchemaFile.exists()); - String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newfieldStoredTrueIndexedFalsePattern - = Pattern.compile( ""); - assertTrue(newfieldStoredTrueIndexedFalsePattern.matcher(managedSchemaContents).find()); 
- - assertQ("/schema/fields?indent=on&wt=xml", - "/response/arr[@name='fields']/lst" - + "[str[@name='name']='newfield' and str[@name='type']='text_en'" - + " and bool[@name='stored']='true' and bool[@name='indexed']='false']"); - - assertU(adoc("newfield", "value1 value2", "id", "789")); - assertU(commit()); - - assertJPost("/schema/fields", - json("[{'name':'newfield2','type':'text_en','stored':true,'indexed':true,'multiValued':true}]"), - "/responseHeader/status==0"); - - managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); - Pattern newfield2StoredTrueIndexedTrueMultiValuedTruePattern - = Pattern.compile( ""); - assertTrue(newfield2StoredTrueIndexedTrueMultiValuedTruePattern.matcher(managedSchemaContents).find()); - - assertQ("/schema/fields?indent=on&wt=xml", - "/response/arr[@name='fields']/lst" - + "[str[@name='name']='newfield2' and str[@name='type']='text_en'" - + " and bool[@name='stored']='true' and bool[@name='indexed']='true' and bool[@name='multiValued']='true']"); - - assertU(adoc("newfield2", "value1 value2", "newfield2", "value3 value4", "id", "790")); - assertU(commit()); - - assertQ("/select?q=newfield2:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='790']"); - } - - - @Test - public void testAddCopyField() throws Exception { - assertQ("/schema/fields/newfield2?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPut("/schema/fields/fieldA", - json("{'type':'text','stored':false}"), - "/responseHeader/status==0"); - assertJPut("/schema/fields/fieldB", - json("{'type':'text','stored':false, 'copyFields':['fieldA']}"), - "/responseHeader/status==0"); - 
assertJPut("/schema/fields/fieldC", - json("{'type':'text','stored':false, 'copyFields':'fieldA'}"), - "/responseHeader/status==0"); - - assertQ("/schema/fields/fieldB?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldB", - "count(/response/arr[@name='copyFields']/lst) = 1" - ); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldC", - "count(/response/arr[@name='copyFields']/lst) = 1" - ); - //fine to pass in empty list, just won't do anything - assertJPut("/schema/fields/fieldD", - json("{'type':'text','stored':false, 'copyFields':[]}"), - "/responseHeader/status==0"); - //some bad usages - assertJPut("/schema/fields/fieldF", - json("{'type':'text','stored':false, 'copyFields':['some_nonexistent_field_ignore_exception']}"), - "/error/msg==\"copyField dest :\\'some_nonexistent_field_ignore_exception\\' is not an explicit field and doesn\\'t match a dynamicField.\""); - } - - @Test - public void testPostMultipleFields() throws Exception { - assertQ("/schema/fields/newfield1?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertQ("/schema/fields/newfield2?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - - assertJPost("/schema/fields", - json( "[{'name':'newfield1','type':'text','stored':false}," - + " {'name':'newfield2','type':'text','stored':false}]"), - "/responseHeader/status==0"); - - assertQ("/schema/fields/newfield1?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - assertQ("/schema/fields/newfield2?indent=on&wt=xml", - 
"count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - assertU(adoc("newfield1", "value1 value2", "id", "123")); - assertU(adoc("newfield2", "value3 value4", "id", "456")); - assertU(commit()); - - assertQ("/select?q=newfield1:value1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='123']"); - assertQ("/select?q=newfield2:value3", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "count(/response/result[@name='response']/doc/*) = 1", - "/response/result[@name='response']/doc/str[@name='id'][.='456']"); - } - - @Test - public void testPostCopy() throws Exception { - assertJPost("/schema/fields", - json( "[{'name':'fieldA','type':'text','stored':false}," - + " {'name':'fieldB','type':'text','stored':false}," - + " {'name':'fieldC','type':'text','stored':false, 'copyFields':['fieldB']}]"), - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldC", - "count(/response/arr[@name='copyFields']/lst) = 1" - ); - assertJPost("/schema/fields", - json( "[{'name':'fieldD','type':'text','stored':false}," - + " {'name':'fieldE','type':'text','stored':false}," - + " {'name':'fieldF','type':'text','stored':false, 'copyFields':['fieldD','fieldE']}," - + " {'name':'fieldG','type':'text','stored':false, 'copyFields':'fieldD'}]"),//single - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldF", - "count(/response/arr[@name='copyFields']/lst) = 2" - ); - //passing in an empty list is perfectly acceptable, it just won't do anything - assertJPost("/schema/fields", - json( "[{'name':'fieldX','type':'text','stored':false}," - + " {'name':'fieldY','type':'text','stored':false}," - + " 
{'name':'fieldZ','type':'text','stored':false, 'copyFields':[]}]"), - "/responseHeader/status==0"); - //some bad usages - - assertJPost("/schema/fields", - json( "[{'name':'fieldH','type':'text','stored':false}," - + " {'name':'fieldI','type':'text','stored':false}," - + " {'name':'fieldJ','type':'text','stored':false, 'copyFields':['some_nonexistent_field_ignore_exception']}]"), - "/error/msg=='copyField dest :\\'some_nonexistent_field_ignore_exception\\' is not an explicit field and doesn\\'t match a dynamicField.'"); - } - - @Test - public void testPostCopyFields() throws Exception { - assertJPost("/schema/fields", - json( "[{'name':'fieldA','type':'text','stored':false}," - + " {'name':'fieldB','type':'text','stored':false}," - + " {'name':'fieldC','type':'text','stored':false}," - + " {'name':'fieldD','type':'text','stored':false}," - + " {'name':'fieldE','type':'text','stored':false}]"), - "/responseHeader/status==0"); - assertJPost("/schema/copyfields", - json( "[{'source':'fieldA', 'dest':'fieldB'}," - + " {'source':'fieldD', 'dest':['fieldC', 'fieldE']}]"), - "/responseHeader/status==0"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldA", - "count(/response/arr[@name='copyFields']/lst) = 1"); - assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=fieldD", - "count(/response/arr[@name='copyFields']/lst) = 2"); - assertJPost("/schema/copyfields", - json("[{'source':'some_nonexistent_field_ignore_exception', 'dest':['fieldA']}]"), - "/error/msg=='copyField source :\\'some_nonexistent_field_ignore_exception\\' is not a glob and doesn\\'t match any explicit field or dynamicField.'"); - assertJPost("/schema/copyfields", - json("[{'source':'fieldD', 'dest':['some_nonexistent_field_ignore_exception']}]"), - "/error/msg=='copyField dest :\\'some_nonexistent_field_ignore_exception\\' is not an explicit field and doesn\\'t match a dynamicField.'"); - } -} - diff --git 
a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java deleted file mode 100644 index a0f4e2538a74..000000000000 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java +++ /dev/null @@ -1,350 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.rest.schema; -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; - -import org.apache.commons.io.FileUtils; -import org.apache.solr.util.RestTestBase; -import org.eclipse.jetty.servlet.ServletHolder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.noggit.JSONUtil; -import org.restlet.ext.servlet.ServerServlet; - -public class TestManagedSchemaFieldTypeResource extends RestTestBase { - - private static File tmpSolrHome; - private static File tmpConfDir; - - private static final String collection = "collection1"; - private static final String confDir = collection + "/conf"; - - @Before - public void before() throws Exception { - tmpSolrHome = createTempDir().toFile(); - tmpConfDir = new File(tmpSolrHome, confDir); - FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - - final SortedMap extraServlets = new TreeMap<>(); - final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class); - solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi"); - extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...' 
- - System.setProperty("managed.schema.mutable", "true"); - System.setProperty("enable.update.log", "false"); - - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); - } - - @After - private void after() throws Exception { - jetty.stop(); - jetty = null; - System.clearProperty("managed.schema.mutable"); - System.clearProperty("enable.update.log"); - - if (restTestHarness != null) { - restTestHarness.close(); - } - restTestHarness = null; - } - - @Test - public void testAddFieldTypes() throws Exception { - - // name mismatch - assertJPut("/schema/fieldtypes/myIntFieldType", - json("{'name':'badNameEh','class':'solr.TrieIntField','stored':false}"), - "/responseHeader/status==400"); - - // no class - assertJPut("/schema/fieldtypes/myIntFieldType", - json("{'stored':false}"), - "/responseHeader/status==400"); - - // invalid attribute - assertJPut("/schema/fieldtypes/myIntFieldType", - json("{'foo':'bar'}"), - "/responseHeader/status==400"); - - // empty analyzer - String ftdef = ""; - ftdef += "{"; - ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',"; - ftdef += " 'analyzer':''"; - ftdef += "}"; - assertJPut("/schema/fieldtypes/emptyAnalyzerFieldType", - json(ftdef), - "/responseHeader/status==400"); - - // basic field types - assertJPut("/schema/fieldtypes/myIntFieldType", - json("{'name':'myIntFieldType','class':'solr.TrieIntField','stored':false}"), - "/responseHeader/status==0"); - checkFieldTypeProps(getExpectedProps("myIntFieldType", "solr.TrieIntField", true, false), 16); - - assertJPut("/schema/fieldtypes/myDoubleFieldType", - json("{'class':'solr.TrieDoubleField','precisionStep':'0','positionIncrementGap':'0'}"), - "/responseHeader/status==0"); - Map expProps = - getExpectedProps("myDoubleFieldType", "solr.TrieDoubleField", true, true); - // add some additional expected props for this type - expProps.put("precisionStep", "0"); - 
expProps.put("positionIncrementGap", "0"); - checkFieldTypeProps(expProps, 18); - - assertJPut("/schema/fieldtypes/myBoolFieldType", - json("{'class':'solr.BoolField','sortMissingLast':true}"), - "/responseHeader/status==0"); - expProps = getExpectedProps("myBoolFieldType", "solr.BoolField", true, true); - expProps.put("sortMissingLast", true); - checkFieldTypeProps(expProps, 17); - - // a text analyzing field type - ftdef = "{"; - ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',"; - ftdef += " 'analyzer':{"; - ftdef += " 'charFilters':["; - ftdef += " {'class':'solr.PatternReplaceCharFilterFactory','replacement':'$1$1','pattern':'([a-zA-Z])\\\\1+'}"; - ftdef += " ],"; - ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},"; - ftdef += " 'filters':["; - ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},"; - ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},"; - ftdef += " {'class':'solr.LowerCaseFilterFactory'},"; - ftdef += " {'class':'solr.ASCIIFoldingFilterFactory'},"; - ftdef += " {'class':'solr.KStemFilterFactory'}"; - ftdef += " ]"; - ftdef += " }"; - ftdef += "}"; - - assertJPut("/schema/fieldtypes/myTextFieldType", json(ftdef), "/responseHeader/status==0"); - - expProps = getExpectedProps("myTextFieldType", "solr.TextField", true, true); - expProps.put("autoGeneratePhraseQueries", false); - expProps.put("omitNorms", false); - expProps.put("omitTermFreqAndPositions", false); - expProps.put("omitPositions", false); - expProps.put("storeOffsetsWithPositions", false); - expProps.put("tokenized", true); - - List analyzerTests = new ArrayList<>(); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='charFilters']/lst[1]/str[@name='class'] = 'solr.PatternReplaceCharFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 
'solr.WhitespaceTokenizerFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.StopFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[4]/str[@name='class'] = 'solr.ASCIIFoldingFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[5]/str[@name='class'] = 'solr.KStemFilterFactory'"); - checkFieldTypeProps(expProps, 19, analyzerTests); - - // now add a field type that uses managed resources and a field that uses that type - - String piglatinStopWordEndpoint = "/schema/analysis/stopwords/piglatin"; - String piglatinSynonymEndpoint = "/schema/analysis/synonyms/piglatin"; - - // now define a new FieldType that uses the managed piglatin endpoints - // the managed endpoints will be autovivified as needed - ftdef = "{"; - ftdef += " 'class':'solr.TextField',"; - ftdef += " 'analyzer':{"; - ftdef += " 'tokenizer':{'class':'solr.StandardTokenizerFactory'},"; - ftdef += " 'filters':["; - ftdef += " {'class':'solr.ManagedStopFilterFactory','managed':'piglatin'},"; - ftdef += " {'class':'solr.ManagedSynonymFilterFactory','managed':'piglatin'}"; - ftdef += " ]"; - ftdef += " }"; - ftdef += "}"; - assertJPut("/schema/fieldtypes/piglatinFieldType", json(ftdef), "/responseHeader/status==0"); - - expProps = getExpectedProps("piglatinFieldType", "solr.TextField", true, true); - expProps.put("autoGeneratePhraseQueries", false); - expProps.put("omitNorms", false); - expProps.put("omitTermFreqAndPositions", false); - expProps.put("omitPositions", false); - 
expProps.put("storeOffsetsWithPositions", false); - expProps.put("tokenized", true); - - analyzerTests = new ArrayList<>(); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.StandardTokenizerFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.ManagedStopFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.ManagedSynonymFilterFactory'"); - checkFieldTypeProps(expProps, 18, analyzerTests); - - assertJQ(piglatinSynonymEndpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); - - // add some piglatin synonyms - Map> syns = new HashMap<>(); - syns.put("appyhay", Arrays.asList("ladgay","oyfuljay")); - assertJPut(piglatinSynonymEndpoint, - JSONUtil.toJSON(syns), - "/responseHeader/status==0"); - assertJQ(piglatinSynonymEndpoint, - "/synonymMappings/managedMap/appyhay==['ladgay','oyfuljay']"); - - // add some piglatin stopwords - assertJPut(piglatinStopWordEndpoint, - JSONUtil.toJSON(Arrays.asList("hetay")), - "/responseHeader/status==0"); - - assertJQ(piglatinStopWordEndpoint + "/hetay", "/hetay=='hetay'"); - - // add a field that uses our new type - assertJPut("/schema/fields/newManagedField", - json("{'type':'piglatinFieldType','stored':false}"), - "/responseHeader/status==0"); - - assertQ("/schema/fields/newManagedField?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); - - // try to delete the managed synonyms endpoint, which should fail because it is being used - assertJDelete(piglatinSynonymEndpoint, "/responseHeader/status==403"); - - // test adding multiple field types at once - ftdef = "["; - ftdef += "{"; - ftdef += " 'name':'textFieldType1',"; - ftdef += " 
'class':'solr.TextField','positionIncrementGap':'100',"; - ftdef += " 'analyzer':{"; - ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},"; - ftdef += " 'filters':["; - ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},"; - ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},"; - ftdef += " {'class':'solr.LowerCaseFilterFactory'}"; - ftdef += " ]"; - ftdef += " }"; - ftdef += "},{"; - ftdef += " 'name':'textFieldType2',"; - ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',"; - ftdef += " 'analyzer':{"; - ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},"; - ftdef += " 'filters':["; - ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},"; - ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},"; - ftdef += " {'class':'solr.LowerCaseFilterFactory'},"; - ftdef += " {'class':'solr.ASCIIFoldingFilterFactory'}"; - ftdef += " ]"; - ftdef += " }"; - ftdef += "}"; - ftdef += "]"; - - assertJPost("/schema/fieldtypes", json(ftdef), "/responseHeader/status==0"); - - expProps = getExpectedProps("textFieldType1", "solr.TextField", true, true); - expProps.put("autoGeneratePhraseQueries", false); - expProps.put("omitNorms", false); - expProps.put("omitTermFreqAndPositions", false); - expProps.put("omitPositions", false); - expProps.put("storeOffsetsWithPositions", false); - expProps.put("tokenized", true); - - analyzerTests = new ArrayList<>(); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.WhitespaceTokenizerFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 
'solr.StopFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'"); - checkFieldTypeProps(expProps, 19, analyzerTests); - - expProps = getExpectedProps("textFieldType2", "solr.TextField", true, true); - expProps.put("autoGeneratePhraseQueries", false); - expProps.put("omitNorms", false); - expProps.put("omitTermFreqAndPositions", false); - expProps.put("omitPositions", false); - expProps.put("storeOffsetsWithPositions", false); - expProps.put("tokenized", true); - - analyzerTests = new ArrayList<>(); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.WhitespaceTokenizerFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.StopFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'"); - analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[4]/str[@name='class'] = 'solr.ASCIIFoldingFilterFactory'"); - checkFieldTypeProps(expProps, 19, analyzerTests); - } - - /** - * Helper function to check fieldType settings against a set of expected values. 
- */ - protected void checkFieldTypeProps(Map expected, int expectedChildCount) { - checkFieldTypeProps(expected, expectedChildCount, null); - } - - protected void checkFieldTypeProps(Map expected, int expectedChildCount, List addlTests) { - String fieldTypeName = (String)expected.get("name"); - - List tests = new ArrayList<>(); - tests.add("count(/response/lst[@name='fieldType']) = 1"); - tests.add("count(/response/lst[@name='fieldType']/*) = "+expectedChildCount); - tests.add("count(/response/lst[@name='fieldType']/arr[@name='fields']/*) = 0"); - tests.add("count(/response/lst[@name='fieldType']/arr[@name='dynamicFields']/*) = 0"); - for (Map.Entry next : expected.entrySet()) { - Object val = next.getValue(); - String pathType = null; - if (val instanceof Boolean) - pathType = "bool"; - else if (val instanceof String) - pathType = "str"; - else - fail("Unexpected value type "+val.getClass().getName()); - // NOTE: it seems like the fieldtypes endpoint only returns strings or booleans - - String xpath = - "/response/lst[@name='fieldType']/"+pathType+"[@name='"+next.getKey()+"']"; - tests.add(xpath+" = '"+val+"'"); - } - - if (addlTests != null) - tests.addAll(addlTests); - - assertQ("/schema/fieldtypes/"+fieldTypeName+"?indent=on&wt=xml&showDefaults=true", - tests.toArray(new String[0])); - } - - /** - * Builds a map containing expected values for a field type created by this test. 
- */ - protected Map getExpectedProps(String name, String className, boolean indexed, boolean stored) { - Map map = new HashMap<>(); - map.put("name", name); - map.put("class", className); - map.put("indexed", indexed); - map.put("stored", stored); - map.put("docValues", false); - map.put("termVectors", false); - map.put("termPositions", false); - map.put("termOffsets", false); - map.put("omitNorms", true); - map.put("omitTermFreqAndPositions", true); - map.put("omitPositions", false); - map.put("storeOffsetsWithPositions", false); - map.put("multiValued", false); - map.put("tokenized", false); - return map; - } -} diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestRemoveLastDynamicCopyField.java b/solr/core/src/test/org/apache/solr/rest/schema/TestRemoveLastDynamicCopyField.java deleted file mode 100644 index 4b4ddd34bacd..000000000000 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestRemoveLastDynamicCopyField.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.rest.schema; - -import java.io.File; -import java.io.StringReader; -import java.util.List; -import java.util.Map; - -import org.apache.commons.io.FileUtils; -import org.apache.solr.util.RestTestBase; -import org.junit.After; -import org.junit.Before; -import org.noggit.JSONParser; -import org.noggit.ObjectBuilder; - -public class TestRemoveLastDynamicCopyField extends RestTestBase { - private static File tmpSolrHome; - - @Before - public void before() throws Exception { - tmpSolrHome = createTempDir().toFile(); - FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - - System.setProperty("managed.schema.mutable", "true"); - System.setProperty("enable.update.log", "false"); - - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-single-dynamic-copy-field.xml", - "/solr", true, null); - } - - @After - public void after() throws Exception { - if (jetty != null) { - jetty.stop(); - jetty = null; - } - client = null; - if (restTestHarness != null) { - restTestHarness.close(); - } - restTestHarness = null; - } - - public void test() throws Exception { - List copyFields = getCopyFields(); - assertEquals("There is more than one copyField directive", 1, copyFields.size()); - assertEquals("The copyField source is not '*'", "*", ((Map)copyFields.get(0)).get("source")); - assertEquals("The copyField dest is not 'text'", "text", ((Map)copyFields.get(0)).get("dest")); - - String payload = "{ 'delete-copy-field': { 'source': '*', 'dest': 'text' } }"; - - String response = restTestHarness.post("/schema?wt=json", json(payload)); - Map map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response))); - assertNull(response, map.get("errors")); - - assertEquals(0, getCopyFields().size()); - } - - private List getCopyFields() throws Exception { - String response = restTestHarness.query("/schema?wt=json"); - System.err.println(response); - Map map = (Map)ObjectBuilder.getVal(new 
JSONParser(new StringReader(response))); - return (List)((Map)map.get("schema")).get("copyFields"); - } -} diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java index 6b7be719e666..20166770b71e 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java @@ -24,7 +24,6 @@ public class TestSchemaSimilarityResource extends SolrRestletTestBase { * NOTE: schema used by parent class doesn't define a global sim, so we get the implicit default * which causes the FQN of the class to be returned * - * @see TestClassNameShortening#testShortenedGlobalSimilarityStaysShortened */ @Test public void testGetSchemaSimilarity() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java index 242e5b5f4192..be8c39480e17 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java @@ -140,7 +140,7 @@ public void testManagedStopwords() throws Exception { "/response/lst[@name='error']/int[@name='code'] = '404'"); // add the new field - assertJPut("/schema/fields/" + newFieldName, json("{'type':'managed_en'}"), + assertJPost("/schema/fields", "{add-field : { name :managed_en_field, type : managed_en}}", "/responseHeader/status==0"); // make sure the new field exists now diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java index 9afaf6fb0ca6..26fcde1e44c7 100644 --- 
a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java @@ -90,8 +90,8 @@ public void testManagedSynonyms() throws Exception { JSONUtil.toJSON(syns), "/responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); + assertJQ(endpoint, + "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); // request to a specific mapping assertJQ(endpoint+"/happy", @@ -146,7 +146,7 @@ public void testManagedSynonyms() throws Exception { "/response/lst[@name='error']/int[@name='code'] = '404'"); // add the new field - assertJPut("/schema/fields/" + newFieldName, json("{'type':'managed_en'}"), + assertJPost("/schema", "{ add-field : { name: managed_en_field, type : managed_en}}", "/responseHeader/status==0"); // make sure the new field exists now diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java index 3d0d120c0ad5..0993ba113db9 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java @@ -37,11 +37,12 @@ import org.apache.zookeeper.data.Stat; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; import org.restlet.ext.servlet.ServerServlet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +@Ignore public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"; diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java index b723abf467d2..72051b123aad 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java @@ -575,10 +575,6 @@ public void testAddFieldTypeAccuracy() throws Exception { assertThat("solr.TextField", is(equalTo(newFieldTypeRepresentation.getAttributes().get("class")))); assertThat(analyzerDefinition.getTokenizer().get("class"), is(equalTo(newFieldTypeRepresentation.getAnalyzer().getTokenizer().get("class")))); - assertTrue(newFieldTypeRepresentation.getFields().size() == 1); - assertTrue(newFieldTypeRepresentation.getFields().contains(fieldName)); - assertTrue(newFieldTypeRepresentation.getDynamicFields().size() == 1); - assertTrue(newFieldTypeRepresentation.getDynamicFields().contains(dynamicFieldName)); } @Test From 549e6d7c497b1da0368f012ef0a2521cf0548582 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Mon, 7 Mar 2016 18:12:10 -0500 Subject: [PATCH 0051/1113] LUCENE-7071: reduce byte copying costs of OfflineSorter --- lucene/CHANGES.txt | 5 +++ .../org/apache/lucene/util/ByteBlockPool.java | 22 ++++++++++ .../org/apache/lucene/util/BytesRefArray.java | 41 +++++++++++++++---- .../org/apache/lucene/util/OfflineSorter.java | 3 +- .../search/suggest/SortedInputIterator.java | 16 ++++---- 5 files changed, 69 insertions(+), 18 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 3647d5d191cb..290421a65cc6 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -6,6 +6,11 @@ http://s.apache.org/luceneversions ======================= Lucene 6.1.0 ======================= (No Changes) +Optimizations + +* LUCENE-7071: Reduce bytes copying in OfflineSorter, giving ~10% + speedup on merging 2D LatLonPoint values (Mike McCandless) + ======================= Lucene 6.0.0 ======================= 
System Requirements diff --git a/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java b/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java index 5f8fd4180d6b..6bb12bdfea66 100644 --- a/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java +++ b/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java @@ -280,6 +280,28 @@ public int allocSlice(final byte[] slice, final int upto) { return newUpto+3; } + /** Fill the provided {@link BytesRef} with the bytes at the specified offset/length slice. + * This will avoid copying the bytes, if the slice fits into a single block; otherwise, it uses + * the provided {@linkl BytesRefBuilder} to copy bytes over. */ + void setBytesRef(BytesRefBuilder builder, BytesRef result, long offset, int length) { + result.length = length; + + int bufferIndex = (int) (offset >> BYTE_BLOCK_SHIFT); + byte[] buffer = buffers[bufferIndex]; + int pos = (int) (offset & BYTE_BLOCK_MASK); + if (pos + length <= BYTE_BLOCK_SIZE) { + // common case where the slice lives in a single block: just reference the buffer directly without copying + result.bytes = buffer; + result.offset = pos; + } else { + // uncommon case: the slice spans at least 2 blocks, so we must copy the bytes: + builder.grow(length); + result.bytes = builder.get().bytes; + result.offset = 0; + readBytes(offset, result.bytes, 0, length); + } + } + // Fill in a BytesRef from term's length & bytes encoded in // byte block public void setBytesRef(BytesRef term, int textStart) { diff --git a/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java b/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java index 47ca52b7145e..a19b7da5abed 100644 --- a/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java +++ b/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java @@ -108,7 +108,23 @@ public BytesRef get(BytesRefBuilder spare, int index) { } throw new IndexOutOfBoundsException("index " + index + " must be less than the size: " 
+ lastElement); - + } + + /** Used only by sort below, to set a {@link BytesRef} with the specified slice, avoiding copying bytes in the common case when the slice + * is contained in a single block in the byte block pool. */ + private void setBytesRef(BytesRefBuilder spare, BytesRef result, int index) { + if (index < lastElement) { + int offset = offsets[index]; + int length; + if (index == lastElement - 1) { + length = currentOffset - offset; + } else { + length = offsets[index + 1] - offset; + } + pool.setBytesRef(spare, result, offset, length); + } else { + throw new IndexOutOfBoundsException("index " + index + " must be less than the size: " + lastElement); + } } private int[] sort(final Comparator comp) { @@ -127,25 +143,30 @@ protected void swap(int i, int j) { @Override protected int compare(int i, int j) { final int idx1 = orderedEntries[i], idx2 = orderedEntries[j]; - return comp.compare(get(scratch1, idx1), get(scratch2, idx2)); + setBytesRef(scratch1, scratchBytes1, idx1); + setBytesRef(scratch2, scratchBytes2, idx2); + return comp.compare(scratchBytes1, scratchBytes2); } @Override protected void setPivot(int i) { final int index = orderedEntries[i]; - pivot = get(pivotBuilder, index); + setBytesRef(pivotBuilder, pivot, index); } @Override protected int comparePivot(int j) { final int index = orderedEntries[j]; - return comp.compare(pivot, get(scratch2, index)); + setBytesRef(scratch2, scratchBytes2, index); + return comp.compare(pivot, scratchBytes2); } - private BytesRef pivot; - private final BytesRefBuilder pivotBuilder = new BytesRefBuilder(), - scratch1 = new BytesRefBuilder(), - scratch2 = new BytesRefBuilder(); + private final BytesRef pivot = new BytesRef(); + private final BytesRef scratchBytes1 = new BytesRef(); + private final BytesRef scratchBytes2 = new BytesRef(); + private final BytesRefBuilder pivotBuilder = new BytesRefBuilder(); + private final BytesRefBuilder scratch1 = new BytesRefBuilder(); + private final BytesRefBuilder scratch2 
= new BytesRefBuilder(); }.sort(0, size()); return orderedEntries; } @@ -173,6 +194,7 @@ public BytesRefIterator iterator() { */ public BytesRefIterator iterator(final Comparator comp) { final BytesRefBuilder spare = new BytesRefBuilder(); + final BytesRef result = new BytesRef(); final int size = size(); final int[] indices = comp == null ? null : sort(comp); return new BytesRefIterator() { @@ -181,7 +203,8 @@ public BytesRefIterator iterator(final Comparator comp) { @Override public BytesRef next() { if (pos < size) { - return get(spare, indices == null ? pos++ : indices[pos++]); + setBytesRef(spare, result, indices == null ? pos++ : indices[pos++]); + return result; } return null; } diff --git a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java index 283dc1f90cb6..18e421b7bedf 100644 --- a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java +++ b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java @@ -282,7 +282,6 @@ public String sort(String inputFileName) throws IOException { /** Sort a single partition in-memory. */ protected String sortPartition(TrackingDirectoryWrapper trackingDir) throws IOException { - BytesRefArray data = this.buffer; try (IndexOutput tempFile = trackingDir.createTempOutput(tempFileNamePrefix, "sort", IOContext.DEFAULT); ByteSequencesWriter out = getWriter(tempFile);) { @@ -299,7 +298,7 @@ protected String sortPartition(TrackingDirectoryWrapper trackingDir) throws IOEx } // Clean up the buffer for the next partition. 
- data.clear(); + buffer.clear(); return tempFile.getName(); } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/SortedInputIterator.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/SortedInputIterator.java index 2be175943653..bc71e45ffd53 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/SortedInputIterator.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/SortedInputIterator.java @@ -146,6 +146,7 @@ public boolean hasContexts() { @Override public int compare(BytesRef left, BytesRef right) { // Make shallow copy in case decode changes the BytesRef: + assert left != right; leftScratch.bytes = left.bytes; leftScratch.offset = left.offset; leftScratch.length = left.length; @@ -245,24 +246,24 @@ protected void encode(ByteSequencesWriter writer, ByteArrayDataOutput output, by /** decodes the weight at the current position */ protected long decode(BytesRef scratch, ByteArrayDataInput tmpInput) { - tmpInput.reset(scratch.bytes); + tmpInput.reset(scratch.bytes, scratch.offset, scratch.length); tmpInput.skipBytes(scratch.length - 8); // suggestion - scratch.length -= 8; // long + scratch.length -= Long.BYTES; // long return tmpInput.readLong(); } /** decodes the contexts at the current position */ protected Set decodeContexts(BytesRef scratch, ByteArrayDataInput tmpInput) { - tmpInput.reset(scratch.bytes); + tmpInput.reset(scratch.bytes, scratch.offset, scratch.length); tmpInput.skipBytes(scratch.length - 2); //skip to context set size short ctxSetSize = tmpInput.readShort(); scratch.length -= 2; final Set contextSet = new HashSet<>(); for (short i = 0; i < ctxSetSize; i++) { - tmpInput.setPosition(scratch.length - 2); + tmpInput.setPosition(scratch.offset + scratch.length - 2); short curContextLength = tmpInput.readShort(); scratch.length -= 2; - tmpInput.setPosition(scratch.length - curContextLength); + tmpInput.setPosition(scratch.offset + scratch.length - curContextLength); BytesRef contextSpare = 
new BytesRef(curContextLength); tmpInput.readBytes(contextSpare.bytes, 0, curContextLength); contextSpare.length = curContextLength; @@ -274,10 +275,11 @@ protected Set decodeContexts(BytesRef scratch, ByteArrayDataInput tmpI /** decodes the payload at the current position */ protected BytesRef decodePayload(BytesRef scratch, ByteArrayDataInput tmpInput) { - tmpInput.reset(scratch.bytes); + tmpInput.reset(scratch.bytes, scratch.offset, scratch.length); tmpInput.skipBytes(scratch.length - 2); // skip to payload size short payloadLength = tmpInput.readShort(); // read payload size - tmpInput.setPosition(scratch.length - 2 - payloadLength); // setPosition to start of payload + assert payloadLength >= 0: payloadLength; + tmpInput.setPosition(scratch.offset + scratch.length - 2 - payloadLength); // setPosition to start of payload BytesRef payloadScratch = new BytesRef(payloadLength); tmpInput.readBytes(payloadScratch.bytes, 0, payloadLength); // read payload payloadScratch.length = payloadLength; From d7ee7c661529b2a8c81c3cc52d581b4a5f19b5b8 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 7 Mar 2016 16:12:15 -0500 Subject: [PATCH 0052/1113] LUCENE-7073: fix FieldType issues with Points --- .../benchmark/byTask/feeds/DocMaker.java | 44 ++++++++----------- .../byTask/tasks/ReadTokensTask.java | 13 ++---- .../lucene/codecs/lucene60/package-info.java | 7 ++- .../org/apache/lucene/document/FieldType.java | 24 +++++++--- .../document/SortedNumericDocValuesField.java | 4 +- .../apache/lucene/document/TestFieldType.java | 15 +++++-- .../apache/lucene/index/TestPointValues.java | 3 +- .../lucene/facet/range/DoubleRange.java | 3 +- 8 files changed, 59 insertions(+), 54 deletions(-) diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/DocMaker.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/DocMaker.java index f2c863cf7985..4afafc321c77 100644 --- 
a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/DocMaker.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/DocMaker.java @@ -35,13 +35,12 @@ import org.apache.lucene.benchmark.byTask.utils.Config; import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType.LegacyNumericType; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.LegacyIntField; -import org.apache.lucene.document.LegacyDoubleField; -import org.apache.lucene.document.LegacyLongField; -import org.apache.lucene.document.LegacyFloatField; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; @@ -119,8 +118,8 @@ public DocState(boolean reuseFields, FieldType ft, FieldType bodyFt) { fields.put(ID_FIELD, new StringField(ID_FIELD, "", Field.Store.YES)); fields.put(NAME_FIELD, new Field(NAME_FIELD, "", ft)); - numericFields.put(DATE_MSEC_FIELD, new LegacyLongField(DATE_MSEC_FIELD, 0L, Field.Store.NO)); - numericFields.put(TIME_SEC_FIELD, new LegacyIntField(TIME_SEC_FIELD, 0, Field.Store.NO)); + numericFields.put(DATE_MSEC_FIELD, new LongPoint(DATE_MSEC_FIELD, 0L)); + numericFields.put(TIME_SEC_FIELD, new IntPoint(TIME_SEC_FIELD, 0)); doc = new Document(); } else { @@ -148,7 +147,7 @@ Field getField(String name, FieldType ft) { return f; } - Field getNumericField(String name, LegacyNumericType type) { + Field getNumericField(String name, Class numericType) { Field f; if (reuseFields) { f = numericFields.get(name); @@ -157,21 +156,16 @@ Field getNumericField(String name, LegacyNumericType type) { } if (f == null) { - switch(type) { - case INT: - f = new LegacyIntField(name, 0, Field.Store.NO); - break; - case LONG: - f = new LegacyLongField(name, 
0L, Field.Store.NO); - break; - case FLOAT: - f = new LegacyFloatField(name, 0.0F, Field.Store.NO); - break; - case DOUBLE: - f = new LegacyDoubleField(name, 0.0, Field.Store.NO); - break; - default: - throw new AssertionError("Cannot get here"); + if (numericType.equals(Integer.class)) { + f = new IntPoint(name, 0); + } else if (numericType.equals(Long.class)) { + f = new LongPoint(name, 0L); + } else if (numericType.equals(Float.class)) { + f = new FloatPoint(name, 0.0F); + } else if (numericType.equals(Double.class)) { + f = new DoublePoint(name, 0.0); + } else { + throw new UnsupportedOperationException("Unsupported numeric type: " + numericType); } if (reuseFields) { numericFields.put(name, f); @@ -278,14 +272,14 @@ private Document createDocument(DocData docData, int size, int cnt) throws Unsup date = new Date(); } - Field dateField = ds.getNumericField(DATE_MSEC_FIELD, FieldType.LegacyNumericType.LONG); + Field dateField = ds.getNumericField(DATE_MSEC_FIELD, Long.class); dateField.setLongValue(date.getTime()); doc.add(dateField); util.cal.setTime(date); final int sec = util.cal.get(Calendar.HOUR_OF_DAY)*3600 + util.cal.get(Calendar.MINUTE)*60 + util.cal.get(Calendar.SECOND); - Field timeSecField = ds.getNumericField(TIME_SEC_FIELD, LegacyNumericType.INT); + Field timeSecField = ds.getNumericField(TIME_SEC_FIELD, Integer.class); timeSecField.setIntValue(sec); doc.add(timeSecField); diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java index 4950d418e713..2e44b99fe3eb 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java @@ -26,11 +26,7 @@ import org.apache.lucene.benchmark.byTask.PerfRunData; import org.apache.lucene.benchmark.byTask.feeds.DocMaker; import 
org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyDoubleField; -import org.apache.lucene.document.LegacyFloatField; -import org.apache.lucene.document.LegacyIntField; -import org.apache.lucene.document.LegacyLongField; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; /** @@ -73,11 +69,8 @@ public int doLogic() throws Exception { Analyzer analyzer = getRunData().getAnalyzer(); int tokenCount = 0; for(final IndexableField field : fields) { - if (!field.fieldType().tokenized() || - field instanceof LegacyIntField || - field instanceof LegacyLongField || - field instanceof LegacyFloatField || - field instanceof LegacyDoubleField) { + if (field.fieldType().indexOptions() == IndexOptions.NONE || + field.fieldType().tokenized() == false) { continue; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java index 64531f5c34f9..03a17ba2e38c 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/package-info.java @@ -194,9 +194,9 @@ * *
  • * {@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}. - * Optional pair of files, recording dimesionally indexed fields, to enable fast + * Optional pair of files, recording dimensionally indexed fields, to enable fast * numeric range filtering and large numeric values like BigInteger and BigDecimal (1D) - * and geo shape intersection (2D, 3D). + * and geographic shape intersection (2D, 3D). *
  • * *

    Details on each of these are provided in their linked pages.

    @@ -396,6 +396,9 @@ * contain the zlib-crc32 checksum of the file. *
  • In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric) * that is suitable for faceting/sorting/analytics. + *
  • In version 5.4, DocValues have been improved to store more information on disk: + * addresses for binary fields and ord indexes for multi-valued fields. + *
  • In version 6.0, Points were added, for multi-dimensional range/distance search. *
  • * * diff --git a/lucene/core/src/java/org/apache/lucene/document/FieldType.java b/lucene/core/src/java/org/apache/lucene/document/FieldType.java index 1dfa8792a752..ae84016a327e 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FieldType.java +++ b/lucene/core/src/java/org/apache/lucene/document/FieldType.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.index.PointValues; import org.apache.lucene.util.LegacyNumericUtils; /** @@ -75,8 +76,8 @@ public FieldType(FieldType ref) { this.numericType = ref.numericType(); this.numericPrecisionStep = ref.numericPrecisionStep(); this.docValuesType = ref.docValuesType(); - this.dimensionCount = dimensionCount; - this.dimensionNumBytes = dimensionNumBytes; + this.dimensionCount = ref.dimensionCount; + this.dimensionNumBytes = ref.dimensionNumBytes; // Do not copy frozen! } @@ -365,18 +366,24 @@ public int numericPrecisionStep() { */ public void setDimensions(int dimensionCount, int dimensionNumBytes) { if (dimensionCount < 0) { - throw new IllegalArgumentException("pointDimensionCount must be >= 0; got " + dimensionCount); + throw new IllegalArgumentException("dimensionCount must be >= 0; got " + dimensionCount); + } + if (dimensionCount > PointValues.MAX_DIMENSIONS) { + throw new IllegalArgumentException("dimensionCount must be <= " + PointValues.MAX_DIMENSIONS + "; got " + dimensionCount); } if (dimensionNumBytes < 0) { - throw new IllegalArgumentException("pointNumBytes must be >= 0; got " + dimensionNumBytes); + throw new IllegalArgumentException("dimensionNumBytes must be >= 0; got " + dimensionNumBytes); + } + if (dimensionCount > PointValues.MAX_NUM_BYTES) { + throw new IllegalArgumentException("dimensionNumBytes must be <= " + PointValues.MAX_NUM_BYTES + "; got " + dimensionNumBytes); } if (dimensionCount == 0) { if (dimensionNumBytes != 0) { - throw new 
IllegalArgumentException("when pointDimensionCount is 0 pointNumBytes must 0; got " + dimensionNumBytes); + throw new IllegalArgumentException("when dimensionCount is 0, dimensionNumBytes must 0; got " + dimensionNumBytes); } } else if (dimensionNumBytes == 0) { if (dimensionCount != 0) { - throw new IllegalArgumentException("when pointNumBytes is 0 pointDimensionCount must 0; got " + dimensionCount); + throw new IllegalArgumentException("when dimensionNumBytes is 0, dimensionCount must 0; got " + dimensionCount); } } @@ -484,6 +491,8 @@ public void setDocValuesType(DocValuesType type) { public int hashCode() { final int prime = 31; int result = 1; + result = prime * result + dimensionCount; + result = prime * result + dimensionNumBytes; result = prime * result + ((docValuesType == null) ? 0 : docValuesType.hashCode()); result = prime * result + indexOptions.hashCode(); result = prime * result + numericPrecisionStep; @@ -504,6 +513,8 @@ public boolean equals(Object obj) { if (obj == null) return false; if (getClass() != obj.getClass()) return false; FieldType other = (FieldType) obj; + if (dimensionCount != other.dimensionCount) return false; + if (dimensionNumBytes != other.dimensionNumBytes) return false; if (docValuesType != other.docValuesType) return false; if (indexOptions != other.indexOptions) return false; if (numericPrecisionStep != other.numericPrecisionStep) return false; @@ -517,5 +528,4 @@ public boolean equals(Object obj) { if (tokenized != other.tokenized) return false; return true; } - } diff --git a/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java index 40ceb2595c15..cbba218f7de6 100644 --- a/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java +++ b/lucene/core/src/java/org/apache/lucene/document/SortedNumericDocValuesField.java @@ -31,10 +31,10 @@ * *

    * Note that if you want to encode doubles or floats with proper sort order, - * you will need to encode them with {@link org.apache.lucene.util.LegacyNumericUtils}: + * you will need to encode them with {@link org.apache.lucene.util.NumericUtils}: * *

    - *   document.add(new SortedNumericDocValuesField(name, LegacyNumericUtils.floatToSortableInt(-5.3f)));
    + *   document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(-5.3f)));
      * 
    * *

    diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java index 65f32d843e60..c49d4e013bee 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java @@ -23,6 +23,7 @@ import org.apache.lucene.document.FieldType.LegacyNumericType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.PointValues; import org.apache.lucene.util.LuceneTestCase; import com.carrotsearch.randomizedtesting.generators.RandomPicks; @@ -70,6 +71,10 @@ public void testEquals() throws Exception { FieldType ft10 = new FieldType(); ft10.setStoreTermVectors(true); assertFalse(ft10.equals(ft)); + + FieldType ft11 = new FieldType(); + ft11.setDimensions(1, 4); + assertFalse(ft11.equals(ft)); } public void testPointsToString() { @@ -90,14 +95,16 @@ private static Object randomValue(Class clazz) { } private static FieldType randomFieldType() throws Exception { + // setDimensions handled special as values must be in-bounds. 
+ Method setDimensionsMethod = FieldType.class.getMethod("setDimensions", int.class, int.class); FieldType ft = new FieldType(); for (Method method : FieldType.class.getMethods()) { - if ((method.getModifiers() & Modifier.PUBLIC) != 0 && method.getName().startsWith("set")) { + if (method.getName().startsWith("set")) { final Class[] parameterTypes = method.getParameterTypes(); final Object[] args = new Object[parameterTypes.length]; - if (method.getName().equals("setPointDimensions")) { - args[0] = 1 + random().nextInt(15); - args[1] = 1 + random().nextInt(100); + if (method.equals(setDimensionsMethod)) { + args[0] = 1 + random().nextInt(PointValues.MAX_DIMENSIONS); + args[1] = 1 + random().nextInt(PointValues.MAX_NUM_BYTES); } else { for (int i = 0; i < args.length; ++i) { args[i] = randomValue(parameterTypes[i]); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index 9faa0bcfcce9..7231b1afc6fa 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -385,9 +385,8 @@ public void testIllegalTooManyDimensions() throws Exception { for(int i=0;i { - w.addDocument(doc); + doc.add(new BinaryPoint("dim", values)); }); Document doc2 = new Document(); diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java index 7585708dde07..6f005ed4a1ae 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java @@ -32,7 +32,6 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.NumericUtils; /** Represents a range over double values. 
@@ -50,7 +49,7 @@ public DoubleRange(String label, double minIn, boolean minInclusive, double maxI super(label); // TODO: if DoubleDocValuesField used - // LegacyNumericUtils.doubleToSortableLong format (instead of + // NumericUtils.doubleToSortableLong format (instead of // Double.doubleToRawLongBits) we could do comparisons // in long space From 3a31a8c7686b27e90f9683c4d3a0cdd2e89d3e91 Mon Sep 17 00:00:00 2001 From: David Smiley Date: Mon, 7 Mar 2016 17:29:46 -0500 Subject: [PATCH 0053/1113] LUCENE-7056: Geo3D package re-org (cherry picked from commit 0093e26) --- lucene/CHANGES.txt | 2 ++ .../lucene/spatial/spatial4j/Geo3dShape.java | 12 ++++----- .../spatial/spatial4j/Geo3dRptTest.java | 14 +++++----- .../Geo3dShapeRectRelationTestCase.java | 18 ++++++------- ...Geo3dShapeSphereModelRectRelationTest.java | 16 +++++------ .../Geo3dShapeWGS84ModelRectRelationTest.java | 16 +++++------ .../{geo3d => spatial3d}/Geo3DPoint.java | 6 +++-- .../{geo3d => spatial3d}/Geo3DUtil.java | 2 +- .../PointInGeo3DShapeQuery.java | 7 ++++- .../geom}/ArcDistance.java | 2 +- .../geom}/BasePlanetObject.java | 2 +- .../geom}/BaseXYZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/Bounds.java | 2 +- .../geom}/DistanceStyle.java | 2 +- .../{geo3d => spatial3d/geom}/GeoArea.java | 2 +- .../geom}/GeoAreaFactory.java | 4 +-- .../{geo3d => spatial3d/geom}/GeoBBox.java | 2 +- .../geom}/GeoBBoxFactory.java | 4 +-- .../geom}/GeoBaseBBox.java | 2 +- .../geom}/GeoBaseCircle.java | 2 +- .../geom}/GeoBaseDistanceShape.java | 2 +- .../geom}/GeoBaseMembershipShape.java | 2 +- .../geom}/GeoBasePolygon.java | 2 +- .../geom}/GeoBaseShape.java | 2 +- .../{geo3d => spatial3d/geom}/GeoCircle.java | 2 +- .../geom}/GeoCircleFactory.java | 2 +- .../geom}/GeoCompositeMembershipShape.java | 2 +- .../geom}/GeoCompositePolygon.java | 2 +- .../geom}/GeoConvexPolygon.java | 2 +- .../geom}/GeoDegenerateHorizontalLine.java | 2 +- .../geom}/GeoDegenerateLatitudeZone.java | 2 +- 
.../geom}/GeoDegenerateLongitudeSlice.java | 2 +- .../geom}/GeoDegeneratePoint.java | 2 +- .../geom}/GeoDegenerateVerticalLine.java | 2 +- .../geom}/GeoDistance.java | 2 +- .../geom}/GeoDistanceShape.java | 2 +- .../geom}/GeoLatitudeZone.java | 2 +- .../geom}/GeoLongitudeSlice.java | 2 +- .../geom}/GeoMembershipShape.java | 2 +- .../geom}/GeoNorthLatitudeZone.java | 2 +- .../geom}/GeoNorthRectangle.java | 2 +- .../geom}/GeoOutsideDistance.java | 2 +- .../{geo3d => spatial3d/geom}/GeoPath.java | 2 +- .../{geo3d => spatial3d/geom}/GeoPoint.java | 4 +-- .../{geo3d => spatial3d/geom}/GeoPolygon.java | 2 +- .../geom}/GeoPolygonFactory.java | 2 +- .../geom}/GeoRectangle.java | 2 +- .../{geo3d => spatial3d/geom}/GeoShape.java | 2 +- .../geom}/GeoSizeable.java | 2 +- .../geom}/GeoSouthLatitudeZone.java | 2 +- .../geom}/GeoSouthRectangle.java | 2 +- .../geom}/GeoStandardCircle.java | 2 +- .../GeoWideDegenerateHorizontalLine.java | 2 +- .../geom}/GeoWideLongitudeSlice.java | 2 +- .../geom}/GeoWideNorthRectangle.java | 2 +- .../geom}/GeoWideRectangle.java | 2 +- .../geom}/GeoWideSouthRectangle.java | 2 +- .../{geo3d => spatial3d/geom}/GeoWorld.java | 2 +- .../geom}/LatLonBounds.java | 2 +- .../geom}/LinearDistance.java | 2 +- .../geom}/LinearSquaredDistance.java | 2 +- .../{geo3d => spatial3d/geom}/Membership.java | 2 +- .../geom}/NormalDistance.java | 2 +- .../geom}/NormalSquaredDistance.java | 2 +- .../{geo3d => spatial3d/geom}/Plane.java | 2 +- .../geom}/PlanetModel.java | 4 +-- .../{geo3d => spatial3d/geom}/SidedPlane.java | 2 +- .../geom}/StandardXYZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/Tools.java | 2 +- .../{geo3d => spatial3d/geom}/Vector.java | 2 +- .../{geo3d => spatial3d/geom}/XYZBounds.java | 2 +- .../{geo3d => spatial3d/geom}/XYZSolid.java | 2 +- .../geom}/XYZSolidFactory.java | 4 +-- .../{geo3d => spatial3d/geom}/XYdZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/XdYZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/XdYdZSolid.java | 2 +- .../{geo3d => 
spatial3d/geom}/dXYZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/dXYdZSolid.java | 2 +- .../{geo3d => spatial3d/geom}/dXdYZSolid.java | 2 +- .../geom}/dXdYdZSolid.java | 2 +- .../lucene/spatial3d/geom/package-info.java | 22 +++++++++++++++ .../{geo3d => spatial3d}/package-info.java | 4 +-- lucene/spatial3d/src/java/overview.html | 3 ++- .../{geo3d => spatial3d}/TestGeo3DPoint.java | 13 +++++++-- .../geom}/GeoBBoxTest.java | 2 +- .../geom}/GeoCircleTest.java | 9 ++----- .../geom}/GeoConvexPolygonTest.java | 2 +- .../geom}/GeoModelTest.java | 2 +- .../geom}/GeoPathTest.java | 2 +- .../lucene/spatial3d/geom}/GeoPointTest.java | 27 +++++++++---------- .../geom}/GeoPolygonTest.java | 2 +- .../{geo3d => spatial3d/geom}/PlaneTest.java | 2 +- .../geom}/XYZSolidTest.java | 2 +- 93 files changed, 185 insertions(+), 152 deletions(-) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d}/Geo3DPoint.java (95%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d}/Geo3DUtil.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d}/PointInGeo3DShapeQuery.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/ArcDistance.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/BasePlanetObject.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/BaseXYZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/Bounds.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/DistanceStyle.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoArea.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoAreaFactory.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBBox.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => 
spatial3d/geom}/GeoBBoxFactory.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBaseBBox.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBaseCircle.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBaseDistanceShape.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBaseMembershipShape.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBasePolygon.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBaseShape.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoCircle.java (95%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoCircleFactory.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoCompositeMembershipShape.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoCompositePolygon.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoConvexPolygon.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDegenerateHorizontalLine.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDegenerateLatitudeZone.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDegenerateLongitudeSlice.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDegeneratePoint.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDegenerateVerticalLine.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDistance.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoDistanceShape.java (96%) rename 
lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoLatitudeZone.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoLongitudeSlice.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoMembershipShape.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoNorthLatitudeZone.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoNorthRectangle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoOutsideDistance.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPath.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPoint.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPolygon.java (95%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPolygonFactory.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoRectangle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoShape.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoSizeable.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoSouthLatitudeZone.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoSouthRectangle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoStandardCircle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWideDegenerateHorizontalLine.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWideLongitudeSlice.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWideNorthRectangle.java (99%) rename 
lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWideRectangle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWideSouthRectangle.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/GeoWorld.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/LatLonBounds.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/LinearDistance.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/LinearSquaredDistance.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/Membership.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/NormalDistance.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/NormalSquaredDistance.java (97%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/Plane.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/PlanetModel.java (98%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/SidedPlane.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/StandardXYZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/Tools.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/Vector.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/XYZBounds.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/XYZSolid.java (95%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/XYZSolidFactory.java (96%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/XYdZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => 
spatial3d/geom}/XdYZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/XdYdZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/dXYZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/dXYdZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/dXdYZSolid.java (99%) rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d/geom}/dXdYdZSolid.java (99%) create mode 100644 lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/package-info.java rename lucene/spatial3d/src/java/org/apache/lucene/{geo3d => spatial3d}/package-info.java (83%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d}/TestGeo3DPoint.java (98%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoBBoxTest.java (99%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoCircleTest.java (99%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoConvexPolygonTest.java (98%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoModelTest.java (99%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPathTest.java (99%) rename lucene/{spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d => spatial3d/src/test/org/apache/lucene/spatial3d/geom}/GeoPointTest.java (69%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/GeoPolygonTest.java (99%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/PlaneTest.java (98%) rename lucene/spatial3d/src/test/org/apache/lucene/{geo3d => spatial3d/geom}/XYZSolidTest.java (99%) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 290421a65cc6..ca59e6b8f1e1 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -126,6 +126,8 @@ API Changes * LUCENE-7072: Geo3DPoint 
always uses WGS84 planet model. (Robert Muir, Mike McCandless) +* LUCENE-7056: Geo3D classes are in different packages now. (David Smiley) + Optimizations * LUCENE-6891: Use prefix coding when writing points in diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java index 518fb32b4f17..9fa6d8e5d5fc 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/spatial4j/Geo3dShape.java @@ -23,12 +23,12 @@ import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.SpatialRelation; import org.locationtech.spatial4j.shape.impl.RectangleImpl; -import org.apache.lucene.geo3d.LatLonBounds; -import org.apache.lucene.geo3d.GeoArea; -import org.apache.lucene.geo3d.GeoAreaFactory; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.GeoShape; -import org.apache.lucene.geo3d.PlanetModel; +import org.apache.lucene.spatial3d.geom.LatLonBounds; +import org.apache.lucene.spatial3d.geom.GeoArea; +import org.apache.lucene.spatial3d.geom.GeoAreaFactory; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; /** * A Spatial4j Shape wrapping a {@link GeoShape} ("Geo3D") -- a 3D planar geometry based Spatial4j Shape implementation. 
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java index d26bb29a5979..e62b857faeb7 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dRptTest.java @@ -32,13 +32,13 @@ import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; import org.apache.lucene.spatial.query.SpatialOperation; import org.apache.lucene.spatial.serialized.SerializedDVStrategy; -import org.apache.lucene.geo3d.GeoBBoxFactory; -import org.apache.lucene.geo3d.GeoStandardCircle; -import org.apache.lucene.geo3d.GeoPath; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.GeoPolygonFactory; -import org.apache.lucene.geo3d.GeoShape; -import org.apache.lucene.geo3d.PlanetModel; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoStandardCircle; +import org.apache.lucene.spatial3d.geom.GeoPath; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.junit.Test; import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java index 134b8c753666..d58985f4c558 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeRectRelationTestCase.java @@ -25,15 +25,15 @@ import org.locationtech.spatial4j.shape.Circle; import 
org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.RectIntersectionTestHelper; -import org.apache.lucene.geo3d.LatLonBounds; -import org.apache.lucene.geo3d.GeoBBox; -import org.apache.lucene.geo3d.GeoBBoxFactory; -import org.apache.lucene.geo3d.GeoStandardCircle; -import org.apache.lucene.geo3d.GeoPath; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.GeoPolygonFactory; -import org.apache.lucene.geo3d.GeoShape; -import org.apache.lucene.geo3d.PlanetModel; +import org.apache.lucene.spatial3d.geom.LatLonBounds; +import org.apache.lucene.spatial3d.geom.GeoBBox; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoStandardCircle; +import org.apache.lucene.spatial3d.geom.GeoPath; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.junit.Rule; import org.junit.Test; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java index 2d958231e6d2..3bce480fe525 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeSphereModelRectRelationTest.java @@ -20,14 +20,14 @@ import java.util.List; import org.locationtech.spatial4j.shape.Rectangle; -import org.apache.lucene.geo3d.GeoArea; -import org.apache.lucene.geo3d.GeoBBox; -import org.apache.lucene.geo3d.GeoBBoxFactory; -import org.apache.lucene.geo3d.GeoStandardCircle; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.GeoPolygonFactory; -import org.apache.lucene.geo3d.GeoShape; -import 
org.apache.lucene.geo3d.PlanetModel; +import org.apache.lucene.spatial3d.geom.GeoArea; +import org.apache.lucene.spatial3d.geom.GeoBBox; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoStandardCircle; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.junit.Test; public class Geo3dShapeSphereModelRectRelationTest extends Geo3dShapeRectRelationTestCase { diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeWGS84ModelRectRelationTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeWGS84ModelRectRelationTest.java index 3b026c36a745..b59d7df50a9a 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeWGS84ModelRectRelationTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/Geo3dShapeWGS84ModelRectRelationTest.java @@ -16,14 +16,14 @@ */ package org.apache.lucene.spatial.spatial4j; -import org.apache.lucene.geo3d.GeoArea; -import org.apache.lucene.geo3d.GeoBBox; -import org.apache.lucene.geo3d.GeoBBoxFactory; -import org.apache.lucene.geo3d.GeoCircle; -import org.apache.lucene.geo3d.GeoStandardCircle; -import org.apache.lucene.geo3d.GeoPath; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.PlanetModel; +import org.apache.lucene.spatial3d.geom.GeoArea; +import org.apache.lucene.spatial3d.geom.GeoBBox; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoCircle; +import org.apache.lucene.spatial3d.geom.GeoStandardCircle; +import org.apache.lucene.spatial3d.geom.GeoPath; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.junit.Test; public class 
Geo3dShapeWGS84ModelRectRelationTest extends Geo3dShapeRectRelationTestCase { diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java similarity index 95% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java index cde87f3c77ba..cd2c79a15d0f 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java @@ -14,14 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.RamUsageEstimator; /** * Add this to a document to index lat/lon or x/y/z point, indexed as a 3D point. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DUtil.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DUtil.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java index 34880a139ee3..0a0bf30039c8 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DUtil.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d; class Geo3DUtil { diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java index 9e2132d680db..9df8752b1353 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java @@ -14,10 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d; import java.io.IOException; +import org.apache.lucene.spatial3d.geom.BasePlanetObject; +import org.apache.lucene.spatial3d.geom.GeoArea; +import org.apache.lucene.spatial3d.geom.GeoAreaFactory; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues.Relation; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/ArcDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/ArcDistance.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/ArcDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/ArcDistance.java index c49fd1fe1ca7..bb60be07b719 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/ArcDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/ArcDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Arc distance computation style. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BasePlanetObject.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BasePlanetObject.java index c64b974fd1fb..5cd5acca8ae9 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BasePlanetObject.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BasePlanetObject.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * All Geo3D shapes can derive from this base class, which furnishes diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BaseXYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BaseXYZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/BaseXYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BaseXYZSolid.java index 52bd5da62093..16b52ccee75d 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/BaseXYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/BaseXYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Base class of a family of 3D rectangles, bounded on six sides by X,Y,Z limits diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Bounds.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Bounds.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Bounds.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Bounds.java index 67172205f9b7..4f7c66309885 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Bounds.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Bounds.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * An interface for accumulating bounds information. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/DistanceStyle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/DistanceStyle.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/DistanceStyle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/DistanceStyle.java index 28056cb1841f..8c8658dda669 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/DistanceStyle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/DistanceStyle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Distance computation styles, supporting various ways of computing diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoArea.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoArea.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoArea.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoArea.java index 424e494ec147..5a6db0da6761 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoArea.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoArea.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * A GeoArea represents a standard 2-D breakdown of a part of sphere. It can diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoAreaFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaFactory.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoAreaFactory.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaFactory.java index 24dd211f1f2a..0c3caa994398 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoAreaFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaFactory.java @@ -14,10 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** - * Factory for {@link org.apache.lucene.geo3d.GeoArea}. + * Factory for {@link GeoArea}. 
* * @lucene.experimental */ diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBox.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBox.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java index 10a2388e9557..0ae242592d61 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBox.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * All bounding box shapes have this interface in common. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBoxFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBoxFactory.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBoxFactory.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBoxFactory.java index 9a02ab9d28e8..de7493e5c50e 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBBoxFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBoxFactory.java @@ -14,10 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** - * Factory for {@link org.apache.lucene.geo3d.GeoBBox}. + * Factory for {@link GeoBBox}. 
* * @lucene.experimental */ diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseBBox.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseBBox.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java index f40a0a14215e..7190cdccff1b 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseBBox.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * All bounding box shapes can derive from this base class, which furnishes diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseCircle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseCircle.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseCircle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseCircle.java index 8c306d7ac4cf..75219fd3d32a 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseCircle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseCircle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * GeoCircles have all the characteristics of GeoBaseDistanceShapes, plus GeoSizeable. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseDistanceShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseDistanceShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java index 1c8306adb4a7..39dcf9616adf 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseDistanceShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Distance shapes have capabilities of both geohashing and distance diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseMembershipShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseMembershipShape.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseMembershipShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseMembershipShape.java index a6bba8f4063e..831a7c6af9e3 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseMembershipShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseMembershipShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Membership shapes have capabilities of both geohashing and membership diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBasePolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBasePolygon.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java index 50ad0dcc9ca9..ba221ae88530 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBasePolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * GeoBasePolygon objects are the base class of most GeoPolygon objects. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseShape.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseShape.java index 146cfe857148..54896fc67f37 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoBaseShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Base extended shape object. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircle.java similarity index 95% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircle.java index 154cdc4c084c..b05dff6af6c2 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Interface describing circular area with a center and radius. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircleFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircleFactory.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircleFactory.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircleFactory.java index 2bb8ffc97fa6..ee75179eb997 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCircleFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCircleFactory.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Class which constructs a GeoCircle representing an arbitrary circle. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositeMembershipShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositeMembershipShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java index 25bdda096a1c..9747edad6fa6 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositeMembershipShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.List; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositePolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositePolygon.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositePolygon.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositePolygon.java index b537590658b7..920d3fb9db40 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoCompositePolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositePolygon.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * GeoCompositePolygon is a specific implementation of GeoMembershipShape, which implements GeoPolygon explicitly. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoConvexPolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoConvexPolygon.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoConvexPolygon.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoConvexPolygon.java index fc07c4bb4ad2..fb024b6efee2 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoConvexPolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoConvexPolygon.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.BitSet; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateHorizontalLine.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateHorizontalLine.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateHorizontalLine.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateHorizontalLine.java index 6644f0db2b84..b7de0c2106ae 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateHorizontalLine.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateHorizontalLine.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Degenerate bounding box limited on two sides (left lon, right lon). 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLatitudeZone.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLatitudeZone.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLatitudeZone.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLatitudeZone.java index 489073389145..e79412312b50 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLatitudeZone.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLatitudeZone.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This GeoBBox represents an area rectangle of one specific latitude with diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLongitudeSlice.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLongitudeSlice.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLongitudeSlice.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLongitudeSlice.java index b5eb90237de0..0bb7b90dfc2f 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateLongitudeSlice.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateLongitudeSlice.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Degenerate longitude slice. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegeneratePoint.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePoint.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegeneratePoint.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePoint.java index 63670d7f83bf..fcd20371004b 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegeneratePoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePoint.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This class represents a degenerate point bounding box. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateVerticalLine.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateVerticalLine.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateVerticalLine.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateVerticalLine.java index f21f7747d04c..dff53b4aa1d2 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDegenerateVerticalLine.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegenerateVerticalLine.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Degenerate bounding box limited on two sides (top lat, bottom lat). 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistance.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistance.java index 899a687b431d..d41dd5146471 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * An implementer of this interface is capable of computing the described "distance" values, diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistanceShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistanceShape.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistanceShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistanceShape.java index 1e82f4828257..e7b0348c185d 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoDistanceShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDistanceShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Distance shapes have capabilities of both geohashing and distance diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLatitudeZone.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLatitudeZone.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLatitudeZone.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLatitudeZone.java index 3fc4423ec24d..912ca3205f84 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLatitudeZone.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLatitudeZone.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This GeoBBox represents an area rectangle limited only in latitude. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLongitudeSlice.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLongitudeSlice.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLongitudeSlice.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLongitudeSlice.java index f5de7e766c8a..458cf8bef5bb 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoLongitudeSlice.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoLongitudeSlice.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box limited on left and right. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoMembershipShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoMembershipShape.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoMembershipShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoMembershipShape.java index 54b25513c558..2c479714ec34 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoMembershipShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoMembershipShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Membership shapes have capabilities of both geohashing and membership diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthLatitudeZone.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthLatitudeZone.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthLatitudeZone.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthLatitudeZone.java index 43338bbd6882..2c940618f3ff 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthLatitudeZone.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthLatitudeZone.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This GeoBBox represents an area rectangle limited only in south latitude. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthRectangle.java index 66b9480d681f..a2b6f1b9c8cb 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoNorthRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoNorthRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box limited on three sides (bottom lat, left lon, right lon), including diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoOutsideDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoOutsideDistance.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoOutsideDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoOutsideDistance.java index c1d784d387f5..717854c542fc 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoOutsideDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoOutsideDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Implemented by Geo3D shapes that can compute the distance from a point to the closest outside edge. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPath.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPath.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPath.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPath.java index bc5b9cf3f5ad..a5b8b9b06608 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPath.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPath.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.Collections; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPoint.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPoint.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPoint.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPoint.java index e8a265d40a1a..31ab0aa2f0af 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPoint.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This class represents a point on the surface of a sphere or ellipsoid. @@ -117,7 +117,7 @@ public GeoPoint(final double x, final double y, final double z) { /** Compute an arc distance between two points. * Note: this is an angular distance, and not a surface distance, and is therefore independent of planet model. 
- * For surface distance, see {@link org.apache.lucene.geo3d.PlanetModel#surfaceDistance(GeoPoint, GeoPoint)} + * For surface distance, see {@link PlanetModel#surfaceDistance(GeoPoint, GeoPoint)} * @param v is the second point. * @return the angle, in radians, between the two points. */ diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygon.java similarity index 95% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygon.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygon.java index 634406d785ca..742bdf808d77 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygon.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * GeoPolygon interface description. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygonFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygonFactory.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java index 0dc70a5ebf61..8ee4290df3f6 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoPolygonFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.BitSet; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoRectangle.java index fc2a531f2524..1420c1116d20 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box limited on four sides (top lat, bottom lat, left lon, right lon). diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoShape.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoShape.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoShape.java index 21cdba3b9c70..a2d3947585ef 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoShape.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Generic shape. 
This describes methods that help GeoAreas figure out diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSizeable.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSizeable.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSizeable.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSizeable.java index e8c0ebb2e5ad..3c7e2efce0ea 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSizeable.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSizeable.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Some shapes can compute radii of a geocircle in which they are inscribed. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthLatitudeZone.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthLatitudeZone.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthLatitudeZone.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthLatitudeZone.java index 439dc1b96b83..a1d896740a72 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthLatitudeZone.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthLatitudeZone.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * This GeoBBox represents an area rectangle limited only in north latitude. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthRectangle.java index eb6526be9836..806535e72783 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoSouthRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoSouthRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box limited on three sides (top lat, left lon, right lon). The diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoStandardCircle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardCircle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoStandardCircle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardCircle.java index 0304d532f8a1..bbf50466b8a6 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoStandardCircle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardCircle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Circular area with a center and radius. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideDegenerateHorizontalLine.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideDegenerateHorizontalLine.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideDegenerateHorizontalLine.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideDegenerateHorizontalLine.java index a9af5b2f60ba..48a73afec72c 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideDegenerateHorizontalLine.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideDegenerateHorizontalLine.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Degenerate bounding box wider than PI and limited on two sides (left lon, right lon). diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideLongitudeSlice.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideLongitudeSlice.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideLongitudeSlice.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideLongitudeSlice.java index 64e4fa8b091a..1d6187649f3c 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideLongitudeSlice.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideLongitudeSlice.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box wider than PI but limited on left and right sides ( diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideNorthRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideNorthRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideNorthRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideNorthRectangle.java index 86de58420f9f..9f9dd49bab86 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideNorthRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideNorthRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box wider than PI but limited on three sides ( diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideRectangle.java index 68397bba4737..c561747c57d1 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box wider than PI but limited on four sides (top lat, diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideSouthRectangle.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideSouthRectangle.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideSouthRectangle.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideSouthRectangle.java index 8bd72206551a..da9799ab0c34 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWideSouthRectangle.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWideSouthRectangle.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box wider than PI but limited on three sides (top lat, diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWorld.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWorld.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWorld.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWorld.java index 35ec4ae2d9f2..25bdc9641659 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/GeoWorld.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoWorld.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Bounding box including the entire world. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LatLonBounds.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LatLonBounds.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/LatLonBounds.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LatLonBounds.java index 6478e0cc79b2..627fdaec996f 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LatLonBounds.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LatLonBounds.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * An object for accumulating latitude/longitude bounds information. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearDistance.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearDistance.java index 9cbedba4b417..0c89a166b6c6 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Linear distance computation style. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearSquaredDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearSquaredDistance.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearSquaredDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearSquaredDistance.java index 028d3c4222e6..3fc37da12899 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/LinearSquaredDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/LinearSquaredDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Linear squared distance computation style. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Membership.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Membership.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Membership.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Membership.java index 3ca6b095ac67..0cf6ff0edd7c 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Membership.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Membership.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Implemented by Geo3D shapes that can calculate if a point is within it or not. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalDistance.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalDistance.java index cdac0d253bb3..50b2c7fc0b4d 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Normal distance computation style. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalSquaredDistance.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalSquaredDistance.java similarity index 97% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalSquaredDistance.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalSquaredDistance.java index 035fd40e58c4..a355d09a67a2 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/NormalSquaredDistance.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/NormalSquaredDistance.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Normal squared distance computation style. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Plane.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Plane.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Plane.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Plane.java index 07d0c5b5f879..1f2c054791ba 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Plane.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Plane.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * We know about three kinds of planes. First kind: general plain through two points and origin diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PlanetModel.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java similarity index 98% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/PlanetModel.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java index 395fa1512b9d..d45d7761a8c3 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PlanetModel.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/PlanetModel.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Holds mathematical constants associated with the model of a planet. @@ -192,7 +192,7 @@ public boolean pointOutside(final double x, final double y, final double z) { * @param p1 is the first point. * @param p2 is the second point. * @return the adjusted angle, when multiplied by the mean earth radius, yields a surface distance. This will differ - * from GeoPoint.arcDistance() only when the planet model is not a sphere. 
@see {@link org.apache.lucene.geo3d.GeoPoint#arcDistance(GeoPoint)} + * from GeoPoint.arcDistance() only when the planet model is not a sphere. @see {@link GeoPoint#arcDistance(GeoPoint)} */ public double surfaceDistance(final GeoPoint p1, final GeoPoint p2) { final double latA = p1.getLatitude(); diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/SidedPlane.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/SidedPlane.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/SidedPlane.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/SidedPlane.java index 7fc543d24e56..e080bc04f40e 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/SidedPlane.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/SidedPlane.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Combination of a plane, and a sign value indicating what evaluation values are on the correct diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/StandardXYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardXYZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/StandardXYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardXYZSolid.java index cd542254cbd2..492f7b46a50c 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/StandardXYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardXYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Tools.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Tools.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Tools.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Tools.java index 89d37aab844a..e8ee29e530e0 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Tools.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Tools.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Static methods globally useful for 3d geometric work. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Vector.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Vector.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/Vector.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Vector.java index 1a3972d3b0cd..3a1b233529a8 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Vector.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/Vector.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * A 3d vector in space, not necessarily diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZBounds.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZBounds.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java index 22e324bb457f..c3ee53d4166c 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZBounds.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * An object for accumulating XYZ bounds information. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolid.java similarity index 95% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolid.java index ab4640295973..9298079b7b64 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * Interface for a family of 3D rectangles, bounded on six sides by X,Y,Z limits diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolidFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolidFactory.java similarity index 96% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolidFactory.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolidFactory.java index 409ba86a0ff3..25ea4005044a 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYZSolidFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZSolidFactory.java @@ -14,10 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** - * Factory for {@link org.apache.lucene.geo3d.XYZSolid}. + * Factory for {@link XYZSolid}. * * @lucene.experimental */ diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYdZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYdZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYdZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYdZSolid.java index e7cbe25e1316..66aac84d9161 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XYdZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYdZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in Z diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYZSolid.java index f6a2fa3931bd..d9e11b89fe6b 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in Y diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYdZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYdZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYdZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYdZSolid.java index 562e5a6aed95..33d0beafc656 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/XdYdZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XdYdZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in Y and Z. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYZSolid.java index 5a2a0063fe93..48fe714c28ad 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in X. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYdZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYdZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYdZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYdZSolid.java index 96b3004bc5ca..d824f2629750 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXYdZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXYdZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in X and Z. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYZSolid.java index b58cd9261c23..b9942b5e6c9b 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in X and Y. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYdZSolid.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYdZSolid.java similarity index 99% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYdZSolid.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYdZSolid.java index b26cf6345953..66dcab8536e6 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/dXdYdZSolid.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/dXdYdZSolid.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; /** * 3D rectangle, bounded on six sides by X,Y,Z limits, degenerate in all dimensions diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/package-info.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/package-info.java new file mode 100644 index 000000000000..446365cbd24a --- /dev/null +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/package-info.java @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Shapes implemented using 3D planar geometry. This package has no dependencies aside from Java. + * This code was contributed under the name "Geo3D". 
+ */ +package org.apache.lucene.spatial3d.geom; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/package-info.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/package-info.java similarity index 83% rename from lucene/spatial3d/src/java/org/apache/lucene/geo3d/package-info.java rename to lucene/spatial3d/src/java/org/apache/lucene/spatial3d/package-info.java index 2b6af740b51f..032d26f73934 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/package-info.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/package-info.java @@ -16,6 +16,6 @@ */ /** - * Shapes implemented using 3D planar geometry. + * Lucene field & query support for the spatial geometry implemented in {@link org.apache.lucene.spatial3d.geom}. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d; diff --git a/lucene/spatial3d/src/java/overview.html b/lucene/spatial3d/src/java/overview.html index 152d06e61101..293cc65aa60c 100644 --- a/lucene/spatial3d/src/java/overview.html +++ b/lucene/spatial3d/src/java/overview.html @@ -23,7 +23,8 @@

    The Spatial3D Module for Apache Lucene

    - APIs for planar spatial3d math. + APIs for planar spatial3d math. It is mostly comprised of computational geometry code in the + "org.apache.lucene.spatial3d.geom" package (AKA "Geo3D").

    diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java similarity index 98% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java index 17a40755d2fc..a4d8ed136316 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d; import java.io.IOException; import java.io.PrintWriter; @@ -36,6 +36,16 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.spatial3d.geom.GeoArea; +import org.apache.lucene.spatial3d.geom.GeoAreaFactory; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoCircleFactory; +import org.apache.lucene.spatial3d.geom.GeoPath; +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.GeoShape; +import org.apache.lucene.spatial3d.geom.PlanetModel; +import org.apache.lucene.spatial3d.geom.XYZBounds; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; @@ -50,7 +60,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; diff --git 
a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoBBoxTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoBBoxTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoBBoxTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoBBoxTest.java index b76134e43b31..f5a148fca9b7 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoBBoxTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoBBoxTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.List; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoCircleTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoCircleTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoCircleTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoCircleTest.java index aa5c2e3325bb..186bf4c650ee 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoCircleTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoCircleTest.java @@ -14,15 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +package org.apache.lucene.spatial3d.geom; import org.apache.lucene.util.LuceneTestCase; +import org.junit.Test; public class GeoCircleTest extends LuceneTestCase { diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoConvexPolygonTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoConvexPolygonTest.java similarity index 98% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoConvexPolygonTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoConvexPolygonTest.java index d6ca7ba621a6..a6ca404ceaae 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoConvexPolygonTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoConvexPolygonTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import org.junit.Test; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoModelTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoModelTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoModelTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoModelTest.java index b3001d4df832..d5fcbdd5b89c 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoModelTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoModelTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import org.junit.Test; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPathTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPathTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPathTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPathTest.java index fea7ed48ac44..37460699359e 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPathTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPathTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import org.junit.Test; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPointTest.java similarity index 69% rename from lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPointTest.java index 444647404e5c..ed1792862be2 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/geo3d/GeoPointTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPointTest.java @@ -14,33 +14,30 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.spatial.spatial4j.geo3d; +package org.apache.lucene.spatial3d.geom; -import org.apache.lucene.geo3d.GeoPoint; -import org.apache.lucene.geo3d.PlanetModel; import org.apache.lucene.util.LuceneTestCase; import org.junit.Test; -import org.locationtech.spatial4j.distance.DistanceUtils; - import static com.carrotsearch.randomizedtesting.RandomizedTest.randomFloat; /** * Test basic GeoPoint functionality. */ public class GeoPointTest extends LuceneTestCase { + static final double DEGREES_TO_RADIANS = Math.PI / 180; @Test public void testConversion() { - testPointRoundTrip(PlanetModel.SPHERE, 90 * DistanceUtils.DEGREES_TO_RADIANS, 0, 1e-6); - testPointRoundTrip(PlanetModel.SPHERE, -90 * DistanceUtils.DEGREES_TO_RADIANS, 0, 1e-6); - testPointRoundTrip(PlanetModel.WGS84, 90 * DistanceUtils.DEGREES_TO_RADIANS, 0, 1e-6); - testPointRoundTrip(PlanetModel.WGS84, -90 * DistanceUtils.DEGREES_TO_RADIANS, 0, 1e-6); + testPointRoundTrip(PlanetModel.SPHERE, 90 * DEGREES_TO_RADIANS, 0, 1e-6); + testPointRoundTrip(PlanetModel.SPHERE, -90 * DEGREES_TO_RADIANS, 0, 1e-6); + testPointRoundTrip(PlanetModel.WGS84, 90 * DEGREES_TO_RADIANS, 0, 1e-6); + testPointRoundTrip(PlanetModel.WGS84, -90 * DEGREES_TO_RADIANS, 0, 1e-6); final int times = atLeast(100); for (int i = 0; i < times; i++) { - final double pLat = (randomFloat() * 180.0 - 90.0) * DistanceUtils.DEGREES_TO_RADIANS; - final double pLon = (randomFloat() * 360.0 - 180.0) * DistanceUtils.DEGREES_TO_RADIANS; + final double pLat = (randomFloat() * 180.0 - 90.0) * DEGREES_TO_RADIANS; + final double pLon = (randomFloat() * 360.0 - 180.0) * DEGREES_TO_RADIANS; testPointRoundTrip(PlanetModel.SPHERE, pLat, pLon, 1e-6);//1e-6 since there's a square root in there (Karl says) testPointRoundTrip(PlanetModel.WGS84, pLat, pLon, 1e-6); } @@ -60,10 +57,10 @@ protected void testPointRoundTrip(PlanetModel planetModel, double pLat, double p public void testSurfaceDistance() { final int times = atLeast(100); for (int i = 0; 
i < times; i++) { - final double p1Lat = (randomFloat() * 180.0 - 90.0) * DistanceUtils.DEGREES_TO_RADIANS; - final double p1Lon = (randomFloat() * 360.0 - 180.0) * DistanceUtils.DEGREES_TO_RADIANS; - final double p2Lat = (randomFloat() * 180.0 - 90.0) * DistanceUtils.DEGREES_TO_RADIANS; - final double p2Lon = (randomFloat() * 360.0 - 180.0) * DistanceUtils.DEGREES_TO_RADIANS; + final double p1Lat = (randomFloat() * 180.0 - 90.0) * DEGREES_TO_RADIANS; + final double p1Lon = (randomFloat() * 360.0 - 180.0) * DEGREES_TO_RADIANS; + final double p2Lat = (randomFloat() * 180.0 - 90.0) * DEGREES_TO_RADIANS; + final double p2Lon = (randomFloat() * 360.0 - 180.0) * DEGREES_TO_RADIANS; final GeoPoint p1 = new GeoPoint(PlanetModel.SPHERE, p1Lat, p1Lon); final GeoPoint p2 = new GeoPoint(PlanetModel.SPHERE, p2Lat, p2Lon); final double arcDistance = p1.arcDistance(p2); diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPolygonTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPolygonTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPolygonTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPolygonTest.java index f1511b96942d..d9b220de9252 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/GeoPolygonTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/GeoPolygonTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.List; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/PlaneTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/PlaneTest.java similarity index 98% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/PlaneTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/PlaneTest.java index 2ac3856bf7ec..91bd0c3f928e 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/PlaneTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/PlaneTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import org.junit.Test; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/XYZSolidTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/XYZSolidTest.java similarity index 99% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/XYZSolidTest.java rename to lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/XYZSolidTest.java index 876a5256b116..98c616ebbff2 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/XYZSolidTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/XYZSolidTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.lucene.geo3d; +package org.apache.lucene.spatial3d.geom; import org.apache.lucene.util.LuceneTestCase; import org.junit.Test; From 16d98894ed41992eb5cfd5de11ae1e2ee72b97c9 Mon Sep 17 00:00:00 2001 From: David Smiley Date: Mon, 7 Mar 2016 23:48:31 -0500 Subject: [PATCH 0054/1113] SOLR-6926: Remove deprecated "ant example"; replaced by "ant server" a year ago (cherry picked from commit a0a571c) --- solr/build.xml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/solr/build.xml b/solr/build.xml index 218bf8ce6d39..61503dfb9a14 100644 --- a/solr/build.xml +++ b/solr/build.xml @@ -809,18 +809,4 @@ - - - - - ! ! ! NOTICE NOTICE NOTICE ! ! ! - - 'ant example' is no longer recomended - - Use 'ant server' instead - - 'ant example' is going to be removed at some point - - - From 761618727d7604b09ec4ecc68d65689c888311f1 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 7 Mar 2016 16:00:40 +0000 Subject: [PATCH 0055/1113] SOLR-8782: Improve async collections API --- solr/CHANGES.txt | 4 + .../configsets/cloud-minimal/conf/schema.xml | 32 + .../cloud-minimal/conf/solrconfig.xml | 48 ++ .../CollectionsAPIAsyncDistributedZkTest.java | 174 ++--- .../security/BasicAuthIntegrationTest.java | 2 +- .../solrj/request/CollectionAdminRequest.java | 691 ++++++++++++------ 6 files changed, 622 insertions(+), 329 deletions(-) create mode 100644 solr/core/src/test-files/solr/configsets/cloud-minimal/conf/schema.xml create mode 100644 solr/core/src/test-files/solr/configsets/cloud-minimal/conf/solrconfig.xml diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 0ebb488c2f35..ba2e737519cf 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -19,6 +19,10 @@ See the Quick Start guide at http://lucene.apache.org/solr/quickstart.html ================== 6.1.0 ================== Detailed Change List ---------------------- +* SOLR-8782: Add asynchronous sugar methods to the SolrJ Collections API. 
You + can now call .processAsync() to run a method asynchronously, or + .processAndWait() to wait for a call to finish without holding HTTP + collections open. (Alan Woodward) New Features ---------------------- diff --git a/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/schema.xml b/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/schema.xml new file mode 100644 index 000000000000..2a276af206a4 --- /dev/null +++ b/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/schema.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + id + diff --git a/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/solrconfig.xml b/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/solrconfig.xml new file mode 100644 index 000000000000..059e58f447c2 --- /dev/null +++ b/solr/core/src/test-files/solr/configsets/cloud-minimal/conf/solrconfig.xml @@ -0,0 +1,48 @@ + + + + + + + + + ${solr.data.dir:} + + + + + ${tests.luceneMatchVersion:LATEST} + + + + ${solr.commitwithin.softcommit:true} + + + + + + + explicit + true + text + + + + + diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java index 493b2988853b..dcb115a1269b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java @@ -21,90 +21,80 @@ import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; -import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create; import 
org.apache.solr.client.solrj.request.CollectionAdminRequest.SplitShard; import org.apache.solr.client.solrj.response.RequestStatusState; -import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.Slice; +import org.junit.BeforeClass; import org.junit.Test; /** * Tests the Cloud Collections API. */ @Slow -public class CollectionsAPIAsyncDistributedZkTest extends AbstractFullDistribZkTestBase { +public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase { + private static final int MAX_TIMEOUT_SECONDS = 60; - public CollectionsAPIAsyncDistributedZkTest() { - sliceCount = 1; + @BeforeClass + public static void setupCluster() throws Exception { + configureCluster(2) + .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .configure(); } @Test - @ShardsFixed(num = 1) public void testSolrJAPICalls() throws Exception { - try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { - Create createCollectionRequest = new Create() - .setCollectionName("testasynccollectioncreation") - .setNumShards(1) - .setConfigName("conf1") - .setAsyncId("1001"); - createCollectionRequest.process(client); - - RequestStatusState state = getRequestStateAfterCompletion("1001", MAX_TIMEOUT_SECONDS, client); - - assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); - - createCollectionRequest = new Create() - .setCollectionName("testasynccollectioncreation") - .setNumShards(1) - .setConfigName("conf1") - .setAsyncId("1002"); - createCollectionRequest.process(client); - - state = getRequestStateAfterCompletion("1002", MAX_TIMEOUT_SECONDS, client); - - assertSame("Recreating a collection with the same should have failed.", RequestStatusState.FAILED, state); - - CollectionAdminRequest.AddReplica addReplica = new 
CollectionAdminRequest.AddReplica() - .setCollectionName("testasynccollectioncreation") - .setShardName("shard1") - .setAsyncId("1003"); - client.request(addReplica); - state = getRequestStateAfterCompletion("1003", MAX_TIMEOUT_SECONDS, client); - assertSame("Add replica did not complete", RequestStatusState.COMPLETED, state); - - SplitShard splitShardRequest = new SplitShard() - .setCollectionName("testasynccollectioncreation") - .setShardName("shard1") - .setAsyncId("1004"); - splitShardRequest.process(client); - - state = getRequestStateAfterCompletion("1004", MAX_TIMEOUT_SECONDS * 2, client); - - assertEquals("Shard split did not complete. Last recorded state: " + state, RequestStatusState.COMPLETED, state); - } + + final CloudSolrClient client = cluster.getSolrClient(); + + RequestStatusState state = new Create() + .setCollectionName("testasynccollectioncreation") + .setNumShards(1) + .setReplicationFactor(1) + .setConfigName("conf1") + .processAndWait(client, MAX_TIMEOUT_SECONDS); + assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); + + state = new Create() + .setCollectionName("testasynccollectioncreation") + .setNumShards(1) + .setConfigName("conf1") + .processAndWait(client, MAX_TIMEOUT_SECONDS); + assertSame("Recreating a collection with the same should have failed.", RequestStatusState.FAILED, state); + + state = new CollectionAdminRequest.AddReplica() + .setCollectionName("testasynccollectioncreation") + .setShardName("shard1") + .processAndWait(client, MAX_TIMEOUT_SECONDS); + assertSame("Add replica did not complete", RequestStatusState.COMPLETED, state); + + state = new SplitShard() + .setCollectionName("testasynccollectioncreation") + .setShardName("shard1") + .processAndWait(client, MAX_TIMEOUT_SECONDS * 2); + assertEquals("Shard split did not complete. 
Last recorded state: " + state, RequestStatusState.COMPLETED, state); + } @Test public void testAsyncRequests() throws Exception { - String collection = "testAsyncOperations"; - Create createCollectionRequest = new Create() + final String collection = "testAsyncOperations"; + final CloudSolrClient client = cluster.getSolrClient(); + + RequestStatusState state = new Create() .setCollectionName(collection) .setNumShards(1) .setRouterName("implicit") .setShards("shard1") .setConfigName("conf1") - .setAsyncId("42"); - CollectionAdminResponse response = createCollectionRequest.process(cloudClient); - assertEquals("42", response.getResponse().get("requestid")); - RequestStatusState state = getRequestStateAfterCompletion("42", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); //Add a few documents to shard1 @@ -116,59 +106,48 @@ public void testAsyncRequests() throws Exception { doc.addField("_route_", "shard1"); docs.add(doc); } - cloudClient.add(collection, docs); - cloudClient.commit(collection); + client.add(collection, docs); + client.commit(collection); SolrQuery query = new SolrQuery("*:*"); query.set("shards", "shard1"); - assertEquals(numDocs, cloudClient.query(collection, query).getResults().getNumFound()); + assertEquals(numDocs, client.query(collection, query).getResults().getNumFound()); - CollectionAdminRequest.Reload reloadCollection = new CollectionAdminRequest.Reload(); - reloadCollection.setCollectionName(collection).setAsyncId("43"); - response = reloadCollection.process(cloudClient); - assertEquals("43", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("43", MAX_TIMEOUT_SECONDS, cloudClient); + state = new CollectionAdminRequest.Reload() + .setCollectionName(collection) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("ReloadCollection did not complete", RequestStatusState.COMPLETED, 
state); - CollectionAdminRequest.CreateShard createShard = new CollectionAdminRequest.CreateShard() + state = new CollectionAdminRequest.CreateShard() .setCollectionName(collection) .setShardName("shard2") - .setAsyncId("44"); - response = createShard.process(cloudClient); - assertEquals("44", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("44", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateShard did not complete", RequestStatusState.COMPLETED, state); //Add a doc to shard2 to make sure shard2 was created properly SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", numDocs + 1); doc.addField("_route_", "shard2"); - cloudClient.add(collection, doc); - cloudClient.commit(collection); + client.add(collection, doc); + client.commit(collection); query = new SolrQuery("*:*"); query.set("shards", "shard2"); - assertEquals(1, cloudClient.query(collection, query).getResults().getNumFound()); + assertEquals(1, client.query(collection, query).getResults().getNumFound()); - CollectionAdminRequest.DeleteShard deleteShard = new CollectionAdminRequest.DeleteShard() + state = new CollectionAdminRequest.DeleteShard() .setCollectionName(collection) .setShardName("shard2") - .setAsyncId("45"); - response = deleteShard.process(cloudClient); - assertEquals("45", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("45", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteShard did not complete", RequestStatusState.COMPLETED, state); - CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica() + state = new CollectionAdminRequest.AddReplica() .setCollectionName(collection) .setShardName("shard1") - .setAsyncId("46"); - response = addReplica.process(cloudClient); - assertEquals("46", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("46", 
MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("AddReplica did not complete", RequestStatusState.COMPLETED, state); //cloudClient watch might take a couple of seconds to reflect it - Slice shard1 = cloudClient.getZkStateReader().getClusterState().getSlice(collection, "shard1"); + Slice shard1 = client.getZkStateReader().getClusterState().getSlice(collection, "shard1"); int count = 0; while (shard1.getReplicas().size() != 2) { if (count++ > 1000) { @@ -177,51 +156,40 @@ public void testAsyncRequests() throws Exception { Thread.sleep(100); } - CollectionAdminRequest.CreateAlias createAlias = new CollectionAdminRequest.CreateAlias() + state = new CollectionAdminRequest.CreateAlias() .setAliasName("myalias") .setAliasedCollections(collection) - .setAsyncId("47"); - response = createAlias.process(cloudClient); - assertEquals("47", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("47", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateAlias did not complete", RequestStatusState.COMPLETED, state); query = new SolrQuery("*:*"); query.set("shards", "shard1"); - assertEquals(numDocs, cloudClient.query("myalias", query).getResults().getNumFound()); + assertEquals(numDocs, client.query("myalias", query).getResults().getNumFound()); - CollectionAdminRequest.DeleteAlias deleteAlias = new CollectionAdminRequest.DeleteAlias() + state = new CollectionAdminRequest.DeleteAlias() .setAliasName("myalias") - .setAsyncId("48"); - response = deleteAlias.process(cloudClient); - assertEquals("48", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("48", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteAlias did not complete", RequestStatusState.COMPLETED, state); try { - cloudClient.query("myalias", query); + client.query("myalias", query); fail("Alias should not exist"); } 
catch (SolrException e) { //expected } String replica = shard1.getReplicas().iterator().next().getName(); - CollectionAdminRequest.DeleteReplica deleteReplica = new CollectionAdminRequest.DeleteReplica() + state = new CollectionAdminRequest.DeleteReplica() .setCollectionName(collection) .setShardName("shard1") .setReplica(replica) - .setAsyncId("47"); - response = deleteReplica.process(cloudClient); - assertEquals("47", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("47", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteReplica did not complete", RequestStatusState.COMPLETED, state); - CollectionAdminRequest.Delete deleteCollection = new CollectionAdminRequest.Delete() + state = new CollectionAdminRequest.Delete() .setCollectionName(collection) - .setAsyncId("48"); - response = deleteCollection.process(cloudClient); - assertEquals("48", response.getResponse().get("requestid")); - state = getRequestStateAfterCompletion("48", MAX_TIMEOUT_SECONDS, cloudClient); + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state); } + } diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java index c5d27a9a544e..ab02a3e1eacd 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java @@ -161,7 +161,7 @@ protected void doExtraTests(MiniSolrCloudCluster miniCluster, SolrZkClient zkCli verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[2]/name", "collection-admin-edit", 20); CollectionAdminRequest.Reload reload = new CollectionAdminRequest.Reload(); - reload.setCollectionName(cloudSolrClient.getDefaultCollection()); + reload.setCollectionName(defaultCollName); HttpSolrClient solrClient = 
new HttpSolrClient(baseUrl); try { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java index a7d71ca5b93e..c9c8c3989dfb 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java @@ -21,10 +21,15 @@ import java.util.Iterator; import java.util.Map; import java.util.Properties; +import java.util.UUID; +import java.util.concurrent.TimeUnit; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.SolrResponse; +import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.CollectionAdminResponse; +import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.client.solrj.util.SolrIdentifierValidator; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.DocCollection; @@ -37,33 +42,28 @@ import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.NamedList; /** * This class is experimental and subject to change. 
* * @since solr 4.5 */ -public abstract class CollectionAdminRequest > extends SolrRequest { +public abstract class CollectionAdminRequest extends SolrRequest { - protected CollectionAction action = null; + protected final CollectionAction action; private static String PROPERTY_PREFIX = "property."; - protected CollectionAdminRequest setAction(CollectionAction action) { - this.action = action; - return this; - } - - public CollectionAdminRequest() { - super(METHOD.GET, "/admin/collections"); + public CollectionAdminRequest(CollectionAction action) { + this("/admin/collections", action); } - public CollectionAdminRequest(String path) { + public CollectionAdminRequest(String path, CollectionAction action) { super(METHOD.GET, path); + this.action = action; } - protected abstract Q getThis(); - @Override public SolrParams getParams() { if (action == null) { @@ -79,11 +79,6 @@ public Collection getContentStreams() throws IOException { return null; } - @Override - protected CollectionAdminResponse createResponse(SolrClient client) { - return new CollectionAdminResponse(); - } - protected void addProperties(ModifiableSolrParams params, Properties props) { Iterator> iter = props.entrySet().iterator(); while(iter.hasNext()) { @@ -94,18 +89,84 @@ protected void addProperties(ModifiableSolrParams params, Properties props) { } } - protected abstract static class AsyncCollectionAdminRequest > extends CollectionAdminRequest { - protected String asyncId = null; + protected abstract static class AsyncCollectionAdminRequest extends CollectionAdminRequest { - public final T setAsyncId(String asyncId) { - this.asyncId = asyncId; - return getThis(); + public AsyncCollectionAdminRequest(CollectionAction action) { + super(action); } + @Override + protected CollectionAdminResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); + } + + private static String generateAsyncId() { + return UUID.randomUUID().toString(); + } + + protected String asyncId = null; + 
public String getAsyncId() { return asyncId; } + /** + * @deprecated Use {@link #processAsync(String, SolrClient)} or {@link #processAsync(SolrClient)} + */ + @Deprecated + public abstract AsyncCollectionAdminRequest setAsyncId(String id); + + /** + * Process this request asynchronously, generating and returning a request id + * @param client a Solr client + * @return the request id + * @see CollectionAdminRequest.RequestStatus + */ + public String processAsync(SolrClient client) throws IOException, SolrServerException { + return processAsync(generateAsyncId(), client); + } + + /** + * Process this request asynchronously, using a specified request id + * @param asyncId the request id + * @param client a Solr client + * @return the request id + */ + public String processAsync(String asyncId, SolrClient client) throws IOException, SolrServerException { + this.asyncId = asyncId; + NamedList resp = client.request(this); + if (resp.get("error") != null) { + throw new SolrServerException((String)resp.get("error")); + } + return (String) resp.get("requestid"); + } + + /** + * Send this request to a Solr server, and wait (up to a timeout) for the request to + * complete or fail + * @param client a Solr client + * @param timeoutSeconds the maximum time to wait + * @return the status of the request on completion or timeout + */ + public RequestStatusState processAndWait(SolrClient client, long timeoutSeconds) + throws SolrServerException, InterruptedException, IOException { + return processAndWait(generateAsyncId(), client, timeoutSeconds); + } + + /** + * Send this request to a Solr server, and wait (up to a timeout) for the request to + * complete or fail + * @param asyncId an id for the request + * @param client a Solr client + * @param timeoutSeconds the maximum time to wait + * @return the status of the request on completion or timeout + */ + public RequestStatusState processAndWait(String asyncId, SolrClient client, long timeoutSeconds) + throws IOException, 
SolrServerException, InterruptedException { + processAsync(asyncId, client); + return new RequestStatus().setRequestId(asyncId).waitFor(client, timeoutSeconds); + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); @@ -116,121 +177,110 @@ public SolrParams getParams() { } } - //--------------------------------------------------------------------------------------- - // - //--------------------------------------------------------------------------------------- + protected abstract static class AsyncCollectionSpecificAdminRequest extends AsyncCollectionAdminRequest { - protected abstract static class CollectionSpecificAdminRequest > extends CollectionAdminRequest { - protected String collection = null; + protected String collection; - public T setCollectionName(String collectionName) { - this.collection = collectionName; - return getThis(); + public AsyncCollectionSpecificAdminRequest(CollectionAction action) { + super(action); } - public final String getCollectionName() { - return collection; - } + public abstract AsyncCollectionSpecificAdminRequest setCollectionName(String collection); @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); + if (collection == null) + throw new IllegalArgumentException("You must call setCollectionName() on this request"); params.set(CoreAdminParams.NAME, collection); return params; } } - protected abstract static class CollectionSpecificAsyncAdminRequest> extends CollectionSpecificAdminRequest { - protected String asyncId = null; + protected abstract static class AsyncShardSpecificAdminRequest extends AsyncCollectionAdminRequest { - public final T setAsyncId(String asyncId) { - this.asyncId = asyncId; - return getThis(); - } + protected String collection; + protected String shard; - public String getAsyncId() { - return asyncId; + public AsyncShardSpecificAdminRequest(CollectionAction action) { + 
super(action); } + public abstract AsyncShardSpecificAdminRequest setCollectionName(String collection); + + public abstract AsyncShardSpecificAdminRequest setShardName(String shard); + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); - if (asyncId != null) { - params.set(CommonAdminParams.ASYNC, asyncId); - } + if (collection == null) + throw new IllegalArgumentException("You must call setCollectionName() on this request"); + if (shard == null) + throw new IllegalArgumentException("You must call setShardName() on this request"); + params.set(CoreAdminParams.COLLECTION, collection); + params.set(CoreAdminParams.SHARD, shard); return params; } } - protected abstract static class CollectionShardAdminRequest > extends CollectionAdminRequest { - protected String shardName = null; - protected String collection = null; + protected abstract static class ShardSpecificAdminRequest extends CollectionAdminRequest { - public T setCollectionName(String collectionName) { - this.collection = collectionName; - return getThis(); - } + protected String collection; + protected String shard; - public String getCollectionName() { - return collection; + public ShardSpecificAdminRequest(CollectionAction action) { + super(action); } - public T setShardName(String shard) { - this.shardName = shard; - return getThis(); - } + public abstract ShardSpecificAdminRequest setCollectionName(String collection); - public String getShardName() { - return this.shardName; - } + public abstract ShardSpecificAdminRequest setShardName(String shard); @Override public SolrParams getParams() { - ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); + ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); + if (collection == null) + throw new IllegalArgumentException("You must call setCollectionName() on this request"); + if (shard == null) + throw new IllegalArgumentException("You must call setShardName() 
on this request"); params.set(CoreAdminParams.COLLECTION, collection); - params.set(CoreAdminParams.SHARD, shardName); + params.set(CoreAdminParams.SHARD, shard); return params; } + + @Override + protected SolrResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); + } } - protected abstract static class CollectionShardAsyncAdminRequest> extends CollectionShardAdminRequest { - protected String asyncId = null; + //--------------------------------------------------------------------------------------- + // + //--------------------------------------------------------------------------------------- - public final T setAsyncId(String asyncId) { - this.asyncId = asyncId; - return getThis(); - } - public String getAsyncId() { - return asyncId; + protected abstract static class CollectionAdminRoleRequest extends AsyncCollectionAdminRequest { + + protected String node; + protected String role; + + public CollectionAdminRoleRequest(CollectionAction action) { + super(action); } @Override - public SolrParams getParams() { - ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); - if (asyncId != null) { - params.set(CommonAdminParams.ASYNC, asyncId); - } - return params; + public CollectionAdminRoleRequest setAsyncId(String id) { + this.asyncId = id; + return this; } - } - protected abstract static class CollectionAdminRoleRequest > extends AsyncCollectionAdminRequest { - protected String node; - protected String role; - public T setNode(String node) { - this.node = node; - return getThis(); - } + public abstract CollectionAdminRoleRequest setNode(String node); public String getNode() { return this.node; } - public T setRole(String role) { - this.role = role; - return getThis(); - } + public abstract CollectionAdminRoleRequest setRole(String role); public String getRole() { return this.role; @@ -249,7 +299,8 @@ public SolrParams getParams() { /** Specific Collection API call implementations **/ // CREATE request - public static 
class Create extends CollectionSpecificAsyncAdminRequest { + public static class Create extends AsyncCollectionSpecificAdminRequest { + protected String configName = null; protected String createNodeSet = null; protected String routerName; @@ -263,8 +314,9 @@ public static class Create extends CollectionSpecificAsyncAdminRequest { protected Boolean autoAddReplicas; protected Integer stateFormat; private String[] rule , snitch; + public Create() { - action = CollectionAction.CREATE; + super(CollectionAction.CREATE); } public Create setConfigName(String config) { this.configName = config; return this; } @@ -314,7 +366,6 @@ public Create setShards(String shards) { * * @throws IllegalArgumentException if the collection name contains invalid characters. */ - @Override public Create setCollectionName(String collectionName) throws SolrException { if (!SolrIdentifierValidator.validateCollectionName(collectionName)) { throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION, @@ -324,6 +375,12 @@ public Create setCollectionName(String collectionName) throws SolrException { return this; } + @Override + public Create setAsyncId(String id) { + this.asyncId = id; + return this; + } + public Properties getProperties() { return properties; } @@ -337,8 +394,8 @@ public Create setProperties(Properties properties) { public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); - params.set( "collection.configName", configName); - params.set( "createNodeSet", createNodeSet); + params.set("collection.configName", configName); + params.set("createNodeSet", createNodeSet); if (numShards != null) { params.set( ZkStateReader.NUM_SHARDS_PROP, numShards); } @@ -367,51 +424,51 @@ public SolrParams getParams() { return params; } - @Override - protected Create getThis() { - return this; - } } // RELOAD request - public static class Reload extends CollectionSpecificAsyncAdminRequest 
{ + public static class Reload extends AsyncCollectionSpecificAdminRequest { + public Reload() { - action = CollectionAction.RELOAD; + super(CollectionAction.RELOAD); } @Override - public SolrParams getParams() { - ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); - return params; + public Reload setCollectionName(String collection) { + this.collection = collection; + return this; } @Override - protected Reload getThis() { + public Reload setAsyncId(String id) { + this.asyncId = id; return this; } } // DELETE request - public static class Delete extends CollectionSpecificAsyncAdminRequest { + public static class Delete extends AsyncCollectionSpecificAdminRequest { public Delete() { - action = CollectionAction.DELETE; + super(CollectionAction.DELETE); } @Override - public SolrParams getParams() { - ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); - return params; + public Delete setCollectionName(String collection) { + this.collection = collection; + return this; } @Override - protected Delete getThis() { + public Delete setAsyncId(String id) { + this.asyncId = id; return this; } } // CREATESHARD request - public static class CreateShard extends CollectionShardAsyncAdminRequest { + public static class CreateShard extends AsyncShardSpecificAdminRequest { + protected String nodeSet; protected Properties properties; @@ -434,9 +491,15 @@ public CreateShard setProperties(Properties properties) { } public CreateShard() { - action = CollectionAction.CREATESHARD; + super(CollectionAction.CREATESHARD); } - + + @Override + public CreateShard setCollectionName(String collection) { + this.collection = collection; + return this; + } + /** * Provide the name of the shard to be created. 
* @@ -450,7 +513,13 @@ public CreateShard setShardName(String shardName) { throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD, shardName)); } - this.shardName = shardName; + this.shard = shardName; + return this; + } + + @Override + public CreateShard setAsyncId(String id) { + this.asyncId = id; return this; } @@ -466,21 +535,18 @@ public SolrParams getParams() { return params; } - @Override - protected CreateShard getThis() { - return this; - } + } // SPLITSHARD request - public static class SplitShard extends CollectionShardAsyncAdminRequest { + public static class SplitShard extends AsyncShardSpecificAdminRequest { protected String ranges; protected String splitKey; private Properties properties; public SplitShard() { - action = CollectionAction.SPLITSHARD; + super(CollectionAction.SPLITSHARD); } public SplitShard setRanges(String ranges) { this.ranges = ranges; return this; } @@ -504,6 +570,24 @@ public SplitShard setProperties(Properties properties) { return this; } + @Override + public SplitShard setCollectionName(String collection) { + this.collection = collection; + return this; + } + + @Override + public SplitShard setShardName(String shard) { + this.shard = shard; + return this; + } + + @Override + public SplitShard setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); @@ -518,25 +602,16 @@ public SolrParams getParams() { return params; } - @Override - protected SplitShard getThis() { - return this; - } } // DELETESHARD request - public static class DeleteShard extends CollectionShardAsyncAdminRequest { + public static class DeleteShard extends AsyncShardSpecificAdminRequest { private Boolean deleteInstanceDir; private Boolean deleteDataDir; public DeleteShard() { - action = CollectionAction.DELETESHARD; - } - - @Override - protected DeleteShard getThis() { - 
return this; + super(CollectionAction.DELETESHARD); } public Boolean getDeleteInstanceDir() { @@ -557,6 +632,24 @@ public DeleteShard setDeleteDataDir(Boolean deleteDataDir) { return this; } + @Override + public DeleteShard setCollectionName(String collection) { + this.collection = collection; + return this; + } + + @Override + public DeleteShard setShardName(String shard) { + this.shard = shard; + return this; + } + + @Override + public DeleteShard setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); @@ -571,24 +664,43 @@ public SolrParams getParams() { } // FORCELEADER request - public static class ForceLeader extends CollectionShardAdminRequest { + public static class ForceLeader extends ShardSpecificAdminRequest { public ForceLeader() { - action = CollectionAction.FORCELEADER; + super(CollectionAction.FORCELEADER); + } + + + @Override + public ForceLeader setCollectionName(String collection) { + this.collection = collection; + return this; } @Override - protected ForceLeader getThis() { + public ForceLeader setShardName(String shard) { + this.shard = shard; return this; } + + } + + public static class RequestStatusResponse extends CollectionAdminResponse { + + public RequestStatusState getRequestStatus() { + NamedList innerResponse = (NamedList) getResponse().get("status"); + return RequestStatusState.fromKey((String) innerResponse.get("state")); + } + } // REQUESTSTATUS request - public static class RequestStatus extends CollectionAdminRequest { - protected String requestId = null; + public static class RequestStatus extends CollectionAdminRequest { + + protected String requestId = null; public RequestStatus() { - action = CollectionAction.REQUESTSTATUS; + super(CollectionAction.REQUESTSTATUS); } public RequestStatus setRequestId(String requestId) { @@ -603,23 +715,41 @@ public String getRequestId() { @Override public SolrParams 
getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); + if (requestId == null) + throw new IllegalArgumentException("You must call setRequestId() on this request"); params.set(CoreAdminParams.REQUESTID, requestId); return params; } @Override - protected RequestStatus getThis() { - return this; + protected RequestStatusResponse createResponse(SolrClient client) { + return new RequestStatusResponse(); + } + + public RequestStatusState waitFor(SolrClient client, long timeoutSeconds) + throws IOException, SolrServerException, InterruptedException { + long finishTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds); + RequestStatusState state = RequestStatusState.NOT_FOUND; + while (System.nanoTime() < finishTime) { + state = this.process(client).getRequestStatus(); + if (state == RequestStatusState.COMPLETED || state == RequestStatusState.FAILED) { + new DeleteStatus().setRequestId(requestId).process(client); + return state; + } + TimeUnit.SECONDS.sleep(1); + } + return state; } } // DELETESTATUS request - public static class DeleteStatus extends CollectionAdminRequest { + public static class DeleteStatus extends CollectionAdminRequest { + protected String requestId = null; protected Boolean flush = null; public DeleteStatus() { - action = CollectionAction.DELETESTATUS; + super(CollectionAction.DELETESTATUS); } public DeleteStatus setRequestId(String requestId) { @@ -652,18 +782,20 @@ public SolrParams getParams() { } @Override - protected DeleteStatus getThis() { - return this; + protected CollectionAdminResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); } + } // CREATEALIAS request - public static class CreateAlias extends AsyncCollectionAdminRequest { + public static class CreateAlias extends AsyncCollectionAdminRequest { + protected String aliasName; protected String aliasedCollections; public CreateAlias() { - action = CollectionAction.CREATEALIAS; + 
super(CollectionAction.CREATEALIAS); } /** @@ -695,6 +827,12 @@ public String getAliasedCollections() { return this.aliasedCollections; } + @Override + public CreateAlias setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); @@ -703,18 +841,15 @@ public SolrParams getParams() { return params; } - @Override - protected CreateAlias getThis() { - return this; - } } // DELETEALIAS request - public static class DeleteAlias extends AsyncCollectionAdminRequest { + public static class DeleteAlias extends AsyncCollectionAdminRequest { + protected String aliasName; public DeleteAlias() { - action = CollectionAction.DELETEALIAS; + super(CollectionAction.DELETEALIAS); } public DeleteAlias setAliasName(String aliasName) { @@ -722,6 +857,12 @@ public DeleteAlias setAliasName(String aliasName) { return this; } + @Override + public DeleteAlias setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); @@ -729,14 +870,14 @@ public SolrParams getParams() { return params; } - @Override - protected DeleteAlias getThis() { - return this; - } + } // ADDREPLICA request - public static class AddReplica extends CollectionShardAsyncAdminRequest { + public static class AddReplica extends AsyncCollectionAdminRequest { + + protected String collection; + protected String shard; protected String node; protected String routeKey; protected String instanceDir; @@ -744,7 +885,7 @@ public static class AddReplica extends CollectionShardAsyncAdminRequest { + public static class DeleteReplica extends AsyncShardSpecificAdminRequest { + protected String replica; protected Boolean onlyIfDown; private Boolean deleteDataDir; @@ -832,7 +992,7 @@ public static class DeleteReplica extends CollectionShardAsyncAdminRequest { + public static class ClusterProp extends 
CollectionAdminRequest { + private String propertyName; private String propertyValue; public ClusterProp() { - this.action = CollectionAction.CLUSTERPROP; + super(CollectionAction.CLUSTERPROP); } public ClusterProp setPropertyName(String propertyName) { @@ -934,13 +1108,16 @@ public SolrParams getParams() { } @Override - protected ClusterProp getThis() { - return this; + protected CollectionAdminResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); } + + } // MIGRATE request - public static class Migrate extends AsyncCollectionAdminRequest { + public static class Migrate extends AsyncCollectionAdminRequest { + private String collection; private String targetCollection; private String splitKey; @@ -948,7 +1125,7 @@ public static class Migrate extends AsyncCollectionAdminRequest { private Properties properties; public Migrate() { - action = CollectionAction.MIGRATE; + super(CollectionAction.MIGRATE); } public Migrate setCollectionName(String collection) { @@ -996,6 +1173,12 @@ public Properties getProperties() { return this.properties; } + @Override + public Migrate setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); @@ -1012,58 +1195,72 @@ public SolrParams getParams() { return params; } - @Override - protected Migrate getThis() { - return this; - } + } // ADDROLE request - public static class AddRole extends CollectionAdminRoleRequest { + public static class AddRole extends CollectionAdminRoleRequest { + public AddRole() { - action = CollectionAction.ADDROLE; + super(CollectionAction.ADDROLE); } @Override - protected AddRole getThis() { + public AddRole setNode(String node) { + this.node = node; + return this; + } + + @Override + public AddRole setRole(String role) { + this.role = role; return this; } } // REMOVEROLE request - public static class RemoveRole extends CollectionAdminRoleRequest { + public 
static class RemoveRole extends CollectionAdminRoleRequest { + public RemoveRole() { - action = CollectionAction.REMOVEROLE; + super(CollectionAction.REMOVEROLE); } @Override - protected RemoveRole getThis() { + public RemoveRole setNode(String node) { + this.node = node; + return this; + } + + @Override + public RemoveRole setRole(String role) { + this.role = role; return this; } } // OVERSEERSTATUS request - public static class OverseerStatus extends AsyncCollectionAdminRequest { + public static class OverseerStatus extends AsyncCollectionAdminRequest { public OverseerStatus () { - action = CollectionAction.OVERSEERSTATUS; + super(CollectionAction.OVERSEERSTATUS); } @Override - protected OverseerStatus getThis() { + public OverseerStatus setAsyncId(String id) { + this.asyncId = id; return this; } } // CLUSTERSTATUS request - public static class ClusterStatus extends CollectionAdminRequest { + public static class ClusterStatus extends CollectionAdminRequest { protected String shardName = null; protected String collection = null; protected String routeKey = null; public ClusterStatus () { - action = CollectionAction.CLUSTERSTATUS; + super(CollectionAction.CLUSTERSTATUS); } public ClusterStatus setCollectionName(String collectionName) { @@ -1109,32 +1306,35 @@ public SolrParams getParams() { } @Override - protected ClusterStatus getThis() { - return this; + protected CollectionAdminResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); } + + } // LIST request - public static class List extends CollectionAdminRequest { + public static class List extends CollectionAdminRequest { public List () { - action = CollectionAction.LIST; + super(CollectionAction.LIST); } @Override - protected List getThis() { - return this; + protected CollectionAdminResponse createResponse(SolrClient client) { + return new CollectionAdminResponse(); } } // ADDREPLICAPROP request - public static class AddReplicaProp extends CollectionShardAsyncAdminRequest { + 
public static class AddReplicaProp extends AsyncShardSpecificAdminRequest { + private String replica; private String propertyName; private String propertyValue; private Boolean shardUnique; public AddReplicaProp() { - action = CollectionAction.ADDREPLICAPROP; + super(CollectionAction.ADDREPLICAPROP); } public String getReplica() { @@ -1173,6 +1373,24 @@ public AddReplicaProp setShardUnique(Boolean shardUnique) { return this; } + @Override + public AddReplicaProp setCollectionName(String collection) { + this.collection = collection; + return this; + } + + @Override + public AddReplicaProp setShardName(String shard) { + this.shard = shard; + return this; + } + + @Override + public AddReplicaProp setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); @@ -1187,19 +1405,16 @@ public SolrParams getParams() { return params; } - @Override - protected AddReplicaProp getThis() { - return this; - } } // DELETEREPLICAPROP request - public static class DeleteReplicaProp extends CollectionShardAsyncAdminRequest { + public static class DeleteReplicaProp extends AsyncShardSpecificAdminRequest { + private String replica; private String propertyName; public DeleteReplicaProp() { - this.action = CollectionAction.DELETEREPLICAPROP; + super(CollectionAction.DELETEREPLICAPROP); } public String getReplica() { @@ -1220,6 +1435,24 @@ public DeleteReplicaProp setPropertyName(String propertyName) { return this; } + @Override + public DeleteReplicaProp setCollectionName(String collection) { + this.collection = collection; + return this; + } + + @Override + public DeleteReplicaProp setShardName(String shard) { + this.shard = shard; + return this; + } + + @Override + public DeleteReplicaProp setAsyncId(String id) { + this.asyncId = id; + return this; + } + @Override public SolrParams getParams() { ModifiableSolrParams params = new 
ModifiableSolrParams(super.getParams()); @@ -1228,44 +1461,49 @@ public SolrParams getParams() { return params; } - @Override - protected DeleteReplicaProp getThis() { - return this; - } + } // MIGRATECLUSTERSTATE request - public static class MigrateClusterState extends CollectionShardAsyncAdminRequest { + public static class MigrateClusterState extends AsyncCollectionAdminRequest { + + protected String collection; public MigrateClusterState() { - this.action = CollectionAction.MIGRATESTATEFORMAT; + super(CollectionAction.MIGRATESTATEFORMAT); } - @Override - public MigrateClusterState setShardName(String shard) { - throw new UnsupportedOperationException(); + public MigrateClusterState setCollectionName(String collection) { + this.collection = collection; + return this; } @Override - public String getShardName() { - throw new UnsupportedOperationException(); + public MigrateClusterState setAsyncId(String id) { + this.asyncId = id; + return this; } @Override - protected MigrateClusterState getThis() { - return this; + public SolrParams getParams() { + ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); + if (collection == null) + throw new IllegalArgumentException("You must call setCollection() on this request"); + params.set(CoreAdminParams.COLLECTION, collection); + return params; } } // BALANCESHARDUNIQUE request - public static class BalanceShardUnique extends AsyncCollectionAdminRequest { + public static class BalanceShardUnique extends AsyncCollectionAdminRequest { + protected String collection; protected String propertyName; protected Boolean onlyActiveNodes; protected Boolean shardUnique; public BalanceShardUnique() { - this.action = CollectionAction.BALANCESHARDUNIQUE; + super(CollectionAction.BALANCESHARDUNIQUE); } public String getPropertyName() { @@ -1304,21 +1542,24 @@ public String getCollection() { return collection; } + @Override + public BalanceShardUnique setAsyncId(String id) { + this.asyncId = id; + return this; + } + 
@Override public SolrParams getParams() { ModifiableSolrParams params = new ModifiableSolrParams(super.getParams()); params.set(CoreAdminParams.COLLECTION, collection); params.set("property", propertyName); - if(onlyActiveNodes != null) + if (onlyActiveNodes != null) params.set("onlyactivenodes", onlyActiveNodes); - if(shardUnique != null) + if (shardUnique != null) params.set("shardUnique", shardUnique); return params; } - @Override - protected BalanceShardUnique getThis() { - return this; - } } + } From b97e17096e770a4a7c4edc0a353755673ef359ad Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 06:20:25 -0500 Subject: [PATCH 0056/1113] LUCENE-7076: Improve MIGRATE.txt/Point javadocs --- lucene/MIGRATE.txt | 14 ++--- .../apache/lucene/document/BinaryPoint.java | 2 + .../apache/lucene/document/DoublePoint.java | 2 + .../apache/lucene/document/FloatPoint.java | 2 + .../org/apache/lucene/document/IntPoint.java | 2 + .../org/apache/lucene/document/LongPoint.java | 2 + .../org/apache/lucene/index/PointValues.java | 54 +++++++++++++++++-- .../apache/lucene/search/PointInSetQuery.java | 13 +---- .../apache/lucene/search/PointRangeQuery.java | 11 +--- .../lucene/document/BigIntegerPoint.java | 2 + .../lucene/document/InetAddressPoint.java | 2 + .../apache/lucene/document/LatLonPoint.java | 2 + .../apache/lucene/spatial3d/Geo3DPoint.java | 3 +- 13 files changed, 79 insertions(+), 32 deletions(-) diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt index b48226de8677..b94c12d24e62 100644 --- a/lucene/MIGRATE.txt +++ b/lucene/MIGRATE.txt @@ -74,10 +74,12 @@ would be equivalent to the following code with the old setBoost API: float boost = ...; q.setBoost(q.getBoost() * boost); -## DimensionalValues replaces NumericField (LUCENE-6917) +# PointValues replaces NumericField (LUCENE-6917) -DimensionalValues provides faster indexing and searching, a smaller -index size, and less heap used at search time. 
The numeric fields -(IntField, FloatField, LongField, DoubleField) and NumericRangeQuery -have been moved to the backward-codecs module and prefixed with -Legacy. +PointValues provides faster indexing and searching, a smaller +index size, and less heap used at search time. See org.apache.lucene.index.PointValues +for an introduction. + +Legacy numeric encodings from previous versions of Lucene are +deprecated as LegacyIntField, LegacyFloatField, LegacyLongField, and LegacyDoubleField, +and can be searched with LegacyNumericRangeQuery. diff --git a/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java index e139a877264a..4e27d8127c42 100644 --- a/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java @@ -19,6 +19,7 @@ import java.util.Arrays; import java.util.Comparator; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; @@ -40,6 +41,7 @@ *
  • {@link #newRangeQuery(String, byte[], byte[])} for matching a 1D range. *
  • {@link #newRangeQuery(String, byte[][], byte[][])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public final class BinaryPoint extends Field { diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java index 52b039f818ea..26ac0ced6f10 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java @@ -18,6 +18,7 @@ import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -38,6 +39,7 @@ *
  • {@link #newRangeQuery(String, double, double)} for matching a 1D range. *
  • {@link #newRangeQuery(String, double[], double[])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public final class DoublePoint extends Field { diff --git a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java index cad666c4528d..c58881ec8800 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java @@ -18,6 +18,7 @@ import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -38,6 +39,7 @@ *
  • {@link #newRangeQuery(String, float, float)} for matching a 1D range. *
  • {@link #newRangeQuery(String, float[], float[])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public final class FloatPoint extends Field { diff --git a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java index b6f5ae764d49..cb8315f2f205 100644 --- a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java @@ -18,6 +18,7 @@ import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -38,6 +39,7 @@ *
  • {@link #newRangeQuery(String, int, int)} for matching a 1D range. *
  • {@link #newRangeQuery(String, int[], int[])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public final class IntPoint extends Field { diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java index c0672aeacc68..ff78132d7b66 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java @@ -18,6 +18,7 @@ import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -38,6 +39,7 @@ *
  • {@link #newRangeQuery(String, long, long)} for matching a 1D range. *
  • {@link #newRangeQuery(String, long[], long[])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public final class LongPoint extends Field { diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValues.java b/lucene/core/src/java/org/apache/lucene/index/PointValues.java index 230a14f4be86..1fb265443028 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PointValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValues.java @@ -17,17 +17,65 @@ package org.apache.lucene.index; import java.io.IOException; +import java.math.BigInteger; +import java.net.InetAddress; import org.apache.lucene.document.BinaryPoint; import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.util.bkd.BKDWriter; -/** Allows recursively visiting point values indexed with {@link IntPoint}, - * {@link FloatPoint}, {@link LongPoint}, {@link DoublePoint} - * or {@link BinaryPoint}. +/** + * Access to indexed numeric values. + *

    + * Points represent numeric values and are indexed differently than ordinary text. Instead of an inverted index, + * points are indexed with datastructures such as KD-trees. + * These structures are optimized for operations such as range, distance, nearest-neighbor, + * and point-in-polygon queries. + *

    Basic Point Types

    + * + * + * + * + * + * + * + * + * + *
    Java typeLucene class
    {@code int}{@link IntPoint}
    {@code long}{@link LongPoint}
    {@code float}{@link FloatPoint}
    {@code double}{@link DoublePoint}
    {@code byte[]}{@link BinaryPoint}
    {@link BigInteger}BigIntegerPoint*
    {@link InetAddress}InetAddressPoint*
    + * * in the lucene-sandbox jar
    + *

    + * Basic Lucene point types behave like their java peers: for example {@link IntPoint} represents a signed 32-bit + * {@link Integer}, supporting values ranging from {@link Integer#MIN_VALUE} to {@link Integer#MAX_VALUE}, ordered + * consistent with {@link Integer#compareTo(Integer)}. In addition to indexing support, point classes also contain + * static methods (such as {@link IntPoint#newRangeQuery(String, int, int)}) for creating common queries. For example: + *

    + *   // add year 1970 to document
    + *   document.add(new IntPoint("year", 1970));
    + *   // index document
    + *   writer.addDocument(document);
    + *   ...
    + *   // issue range query of 1960-1980
    + *   Query query = IntPoint.newRangeQuery("year", 1960, 1980);
    + *   TopDocs docs = searcher.search(query, ...);
    + * 
    + *

    Geospatial Point Types

    + * Although basic point types such as {@link DoublePoint} support points in multi-dimensional space too, Lucene has + * specialized classes for location data. These classes are optimized for location data: they are more space-efficient and + * support special operations such as distance and polygon queries. There are currently two implementations: + *
    + *
      + *
    1. LatLonPoint in lucene-sandbox: indexes {@code (latitude,longitude)} as {@code (x,y)} in two-dimensional space. + *
    2. Geo3DPoint* in lucene-spatial3d: indexes {@code (latitude,longitude)} as {@code (x,y,z)} in three-dimensional space. + *
    + * * does not support altitude, 3D here means "uses three dimensions under-the-hood"
    + *

    Advanced usage

    + * Custom structures can be created on top of single- or multi- dimensional basic types, on top of + * {@link BinaryPoint} for more flexibility, or via custom {@link Field} subclasses. * * @lucene.experimental */ public abstract class PointValues { diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java index f5ba12dc0db6..944fadfb2bb2 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java @@ -16,15 +16,10 @@ */ package org.apache.lucene.search; - import java.io.IOException; import java.util.Arrays; -import org.apache.lucene.document.BinaryPoint; -import org.apache.lucene.document.DoublePoint; -import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; -import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -48,13 +43,7 @@ * create range queries for lucene's standard {@code Point} types, refer to factory * methods on those classes, e.g. {@link IntPoint#newSetQuery IntPoint.newSetQuery()} for * fields indexed with {@link IntPoint}. 
- - * @see IntPoint - * @see LongPoint - * @see FloatPoint - * @see DoublePoint - * @see BinaryPoint - * + * @see PointValues * @lucene.experimental */ public abstract class PointInSetQuery extends Query { diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index 777c13388130..ebbe7e2fbd6a 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -23,11 +23,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.Relation; -import org.apache.lucene.document.BinaryPoint; // javadocs -import org.apache.lucene.document.DoublePoint; // javadocs -import org.apache.lucene.document.FloatPoint; // javadocs import org.apache.lucene.document.IntPoint; // javadocs -import org.apache.lucene.document.LongPoint; // javadocs import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -44,12 +40,7 @@ * fields indexed with {@link IntPoint}. *

    * For a single-dimensional field this query is a simple range query; in a multi-dimensional field it's a box shape. - * @see IntPoint - * @see LongPoint - * @see FloatPoint - * @see DoublePoint - * @see BinaryPoint - * + * @see PointValues * @lucene.experimental */ public abstract class PointRangeQuery extends Query { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java index f1758581d2dc..70445d6d2fd7 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/BigIntegerPoint.java @@ -19,6 +19,7 @@ import java.math.BigInteger; import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -39,6 +40,7 @@ *

  • {@link #newRangeQuery(String, BigInteger, BigInteger)} for matching a 1D range. *
  • {@link #newRangeQuery(String, BigInteger[], BigInteger[])} for matching points/ranges in n-dimensional space. * + * @see PointValues */ public class BigIntegerPoint extends Field { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java index a0623b367927..f0df6ff29564 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/InetAddressPoint.java @@ -20,6 +20,7 @@ import java.net.UnknownHostException; import java.util.Arrays; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -43,6 +44,7 @@ * This field supports both IPv4 and IPv6 addresses: IPv4 addresses are converted * to IPv4-Mapped IPv6 Addresses: * indexing {@code 1.2.3.4} is the same as indexing {@code ::FFFF:1.2.3.4}. + * @see PointValues */ public class InetAddressPoint extends Field { diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java index 5f45cb5996a9..fd3284b6e29b 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java @@ -20,6 +20,7 @@ import org.apache.lucene.util.NumericUtils; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -43,6 +44,7 @@ *

    * WARNING: Values are indexed with some loss of precision, incurring up to 1E-7 error from the * original {@code double} values. + * @see PointValues */ // TODO ^^^ that is very sandy and hurts the API, usage, and tests tremendously, because what the user passes // to the field is not actually what gets indexed. Float would be 1E-5 error vs 1E-7, but it might be diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java index cd2c79a15d0f..955a2bc0f36a 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DPoint.java @@ -18,6 +18,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.PointValues; import org.apache.lucene.spatial3d.geom.GeoPoint; import org.apache.lucene.spatial3d.geom.GeoShape; import org.apache.lucene.spatial3d.geom.PlanetModel; @@ -34,7 +35,7 @@ *

      *
    • {@link #newShapeQuery newShapeQuery()} for matching all points inside a specified shape *
    - * + * @see PointValues * @lucene.experimental */ public final class Geo3DPoint extends Field { From 914003d19a7c44a8d9aa5fd0a7a6e59882eb1c69 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 06:42:35 -0500 Subject: [PATCH 0057/1113] LUCENE-7077: fail precommit on useless assignment --- .../java/org/apache/lucene/codecs/memory/FSTTermsWriter.java | 2 +- lucene/tools/javadoc/ecj.javadocs.prefs | 2 +- .../test/org/apache/solr/update/DirectUpdateHandlerTest.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java index 50044f1c1eeb..8284d7444f26 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsWriter.java @@ -167,7 +167,7 @@ public void write(Fields fields) throws IOException { FieldInfo fieldInfo = fieldInfos.fieldInfo(field); boolean hasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; TermsEnum termsEnum = terms.iterator(); - TermsWriter termsWriter = termsWriter = new TermsWriter(fieldInfo); + TermsWriter termsWriter = new TermsWriter(fieldInfo); long sumTotalTermFreq = 0; long sumDocFreq = 0; diff --git a/lucene/tools/javadoc/ecj.javadocs.prefs b/lucene/tools/javadoc/ecj.javadocs.prefs index 63f22e6c1eca..d01148c602fa 100644 --- a/lucene/tools/javadoc/ecj.javadocs.prefs +++ b/lucene/tools/javadoc/ecj.javadocs.prefs @@ -51,7 +51,7 @@ org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled org.eclipse.jdt.core.compiler.problem.missingSerialVersion=ignore org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore -org.eclipse.jdt.core.compiler.problem.noEffectAssignment=ignore 
+org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=ignore org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore org.eclipse.jdt.core.compiler.problem.nullReference=ignore diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java index d35614d02303..2bde1186ad67 100644 --- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java @@ -299,7 +299,7 @@ public void testExpungeDeletes() throws Exception { assertU(commit("expungeDeletes","true")); sr = req("q","foo"); - r = r = sr.getSearcher().getIndexReader(); + r = sr.getSearcher().getIndexReader(); assertEquals(r.maxDoc(), r.numDocs()); // no deletions assertEquals(4,r.maxDoc()); // no dups sr.close(); From 644d534f6c5d68e149f5ff36c477e82ecec0f3d2 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Tue, 8 Mar 2016 18:00:12 +0530 Subject: [PATCH 0058/1113] SOLR-8736: schema GET operations on fields, dynamicFields, fieldTypes, copyField have less details --- solr/CHANGES.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index ba2e737519cf..1bba686419ac 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -106,7 +106,8 @@ Upgrading from Solr 5.x * When requesting stats in date fields, "sum" is now a double value instead of a date. See SOLR-8671 -* SOLR-8736: The deprecated GET methods for schema are now accessible and implemented differently +* SOLR-8736: The deprecated GET methods for schema are now accessible through the bulk API. The output + has less details and is not backward compatible. 
Detailed Change List ---------------------- From 2ede76b3871befdcf0eca9a6950101253d8e7521 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 07:36:09 -0500 Subject: [PATCH 0059/1113] LUCENE-7075: convert test class to use points --- .../index/BaseStoredFieldsFormatTestCase.java | 53 ++++++++----------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java index 840fdf5987e6..c58d56aa11e3 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java @@ -33,20 +33,15 @@ import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.simpletext.SimpleTextCodec; import org.apache.lucene.document.Document; -import org.apache.lucene.document.LegacyDoubleField; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType.LegacyNumericType; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.LegacyFloatField; -import org.apache.lucene.document.LegacyIntField; -import org.apache.lucene.document.LegacyLongField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; @@ -244,51 +239,44 @@ public void testNumericField() throws Exception { RandomIndexWriter w = new RandomIndexWriter(random(), dir); final int 
numDocs = atLeast(500); final Number[] answers = new Number[numDocs]; - final LegacyNumericType[] typeAnswers = new LegacyNumericType[numDocs]; + final Class[] typeAnswers = new Class[numDocs]; for(int id=0;id typeAnswer; if (random().nextBoolean()) { // float/double if (random().nextBoolean()) { final float f = random().nextFloat(); answer = Float.valueOf(f); - nf = new LegacyFloatField("nf", f, Field.Store.NO); - sf = new StoredField("nf", f); - typeAnswer = LegacyNumericType.FLOAT; + nf = new StoredField("nf", f); + typeAnswer = Float.class; } else { final double d = random().nextDouble(); answer = Double.valueOf(d); - nf = new LegacyDoubleField("nf", d, Field.Store.NO); - sf = new StoredField("nf", d); - typeAnswer = LegacyNumericType.DOUBLE; + nf = new StoredField("nf", d); + typeAnswer = Double.class; } } else { // int/long if (random().nextBoolean()) { final int i = random().nextInt(); answer = Integer.valueOf(i); - nf = new LegacyIntField("nf", i, Field.Store.NO); - sf = new StoredField("nf", i); - typeAnswer = LegacyNumericType.INT; + nf = new StoredField("nf", i); + typeAnswer = Integer.class; } else { final long l = random().nextLong(); answer = Long.valueOf(l); - nf = new LegacyLongField("nf", l, Field.Store.NO); - sf = new StoredField("nf", l); - typeAnswer = LegacyNumericType.LONG; + nf = new StoredField("nf", l); + typeAnswer = Long.class; } } doc.add(nf); - doc.add(sf); answers[id] = answer; typeAnswers[id] = typeAnswer; - FieldType ft = new FieldType(LegacyIntField.TYPE_STORED); - ft.setNumericPrecisionStep(Integer.MAX_VALUE); - doc.add(new LegacyIntField("id", id, ft)); + doc.add(new StoredField("id", id)); + doc.add(new IntPoint("id", id)); doc.add(new NumericDocValuesField("id", id)); w.addDocument(doc); } @@ -348,10 +336,10 @@ public void testReadSkip() throws IOException { List fields = Arrays.asList( new Field("bytes", bytes, ft), new Field("string", string, ft), - new LegacyLongField("long", l, Store.YES), - new LegacyIntField("int", i, 
Store.YES), - new LegacyFloatField("float", f, Store.YES), - new LegacyDoubleField("double", d, Store.YES) + new StoredField("long", l), + new StoredField("int", i), + new StoredField("float", f), + new StoredField("double", d) ); for (int k = 0; k < 100; ++k) { @@ -519,11 +507,14 @@ public void testWriteReadMerge() throws IOException { final FieldType type = new FieldType(StringField.TYPE_STORED); type.setIndexOptions(IndexOptions.NONE); type.freeze(); - LegacyIntField id = new LegacyIntField("id", 0, Store.YES); + IntPoint id = new IntPoint("id", 0); + StoredField idStored = new StoredField("id", 0); for (int i = 0; i < data.length; ++i) { Document doc = new Document(); doc.add(id); + doc.add(idStored); id.setIntValue(i); + idStored.setIntValue(i); for (int j = 0; j < data[i].length; ++j) { Field f = new Field("bytes" + j, data[i][j], type); doc.add(f); @@ -546,7 +537,7 @@ public void testWriteReadMerge() throws IOException { for (int i = 0; i < 10; ++i) { final int min = random().nextInt(data.length); final int max = min + random().nextInt(20); - iw.deleteDocuments(LegacyNumericRangeQuery.newIntRange("id", min, max, true, false)); + iw.deleteDocuments(IntPoint.newRangeQuery("id", min, max-1)); } iw.forceMerge(2); // force merges with deletions From 058ddffcafd215039194c84ac6b6f315f9dd83cd Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Tue, 8 Mar 2016 18:49:44 +0530 Subject: [PATCH 0060/1113] SOLR-8766: deprecated tag in solrconfig.xml is removed --- solr/CHANGES.txt | 2 ++ .../src/test-files/solr/collection1/conf/solrconfig.xml | 6 ------ .../src/test-files/solr/minimr/conf/solrconfig.xml | 6 ------ .../src/test-files/solr/mrunit/conf/solrconfig.xml | 7 ------- .../solr/solrcelltest/collection1/conf/solrconfig.xml | 6 ------ .../src/test-files/solr/solrcloud/conf/solrconfig.xml | 6 ------ solr/example/example-DIH/solr/db/conf/solrconfig.xml | 6 ------ solr/example/example-DIH/solr/mail/conf/solrconfig.xml | 6 ------ 
solr/example/example-DIH/solr/rss/conf/solrconfig.xml | 6 ------ solr/example/example-DIH/solr/solr/conf/solrconfig.xml | 6 ------ solr/example/example-DIH/solr/tika/conf/solrconfig.xml | 7 ------- solr/example/files/conf/solrconfig.xml | 6 ------ .../solr/configsets/basic_configs/conf/solrconfig.xml | 5 ----- .../data_driven_schema_configs/conf/solrconfig.xml | 7 ------- .../sample_techproducts_configs/conf/solrconfig.xml | 6 ------ 15 files changed, 2 insertions(+), 86 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 1bba686419ac..fa43ecec71cf 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -415,6 +415,8 @@ Other Changes * SOLR-8736: schema GET operations on fields, dynamicFields, fieldTypes, copyField are reimplemented as a part of the bulk API with less details (noble) +* SOLR-8766 : deprecated tag in solrconfig.xml is removed (noble) + ================== 5.5.1 ================== Bug Fixes diff --git a/solr/contrib/morphlines-core/src/test-files/solr/collection1/conf/solrconfig.xml b/solr/contrib/morphlines-core/src/test-files/solr/collection1/conf/solrconfig.xml index b8d1ca66e22f..ab1acd9ec556 100644 --- a/solr/contrib/morphlines-core/src/test-files/solr/collection1/conf/solrconfig.xml +++ b/solr/contrib/morphlines-core/src/test-files/solr/collection1/conf/solrconfig.xml @@ -1487,11 +1487,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/solrconfig.xml b/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/solrconfig.xml index 65637c19e514..f5ce41b4a710 100644 --- a/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/solrconfig.xml +++ b/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/solrconfig.xml @@ -1508,10 +1508,4 @@ --> - - - - *:* - - diff --git a/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/solrconfig.xml b/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/solrconfig.xml index 
691643fc90b6..d40b16de41d9 100644 --- a/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/solrconfig.xml +++ b/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/solrconfig.xml @@ -1511,11 +1511,4 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - - diff --git a/solr/contrib/morphlines-core/src/test-files/solr/solrcelltest/collection1/conf/solrconfig.xml b/solr/contrib/morphlines-core/src/test-files/solr/solrcelltest/collection1/conf/solrconfig.xml index b8d1ca66e22f..ab1acd9ec556 100644 --- a/solr/contrib/morphlines-core/src/test-files/solr/solrcelltest/collection1/conf/solrconfig.xml +++ b/solr/contrib/morphlines-core/src/test-files/solr/solrcelltest/collection1/conf/solrconfig.xml @@ -1487,11 +1487,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/contrib/morphlines-core/src/test-files/solr/solrcloud/conf/solrconfig.xml b/solr/contrib/morphlines-core/src/test-files/solr/solrcloud/conf/solrconfig.xml index 336c789ea131..9d33201ba331 100644 --- a/solr/contrib/morphlines-core/src/test-files/solr/solrcloud/conf/solrconfig.xml +++ b/solr/contrib/morphlines-core/src/test-files/solr/solrcloud/conf/solrconfig.xml @@ -1510,11 +1510,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/example/example-DIH/solr/db/conf/solrconfig.xml b/solr/example/example-DIH/solr/db/conf/solrconfig.xml index 6050c1fba56c..0a4c6ea5ae65 100644 --- a/solr/example/example-DIH/solr/db/conf/solrconfig.xml +++ b/solr/example/example-DIH/solr/db/conf/solrconfig.xml @@ -1486,11 +1486,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/example/example-DIH/solr/mail/conf/solrconfig.xml b/solr/example/example-DIH/solr/mail/conf/solrconfig.xml index 59861b23643f..1cf7d8975bf4 100644 --- a/solr/example/example-DIH/solr/mail/conf/solrconfig.xml +++ b/solr/example/example-DIH/solr/mail/conf/solrconfig.xml @@ -1489,11 +1489,5 @@ EditorialMarkerFactory will do 
exactly that: --> - - - - - *:* - diff --git a/solr/example/example-DIH/solr/rss/conf/solrconfig.xml b/solr/example/example-DIH/solr/rss/conf/solrconfig.xml index 4a9f5e81c6e0..8db8e55fcdbe 100644 --- a/solr/example/example-DIH/solr/rss/conf/solrconfig.xml +++ b/solr/example/example-DIH/solr/rss/conf/solrconfig.xml @@ -1485,11 +1485,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/example/example-DIH/solr/solr/conf/solrconfig.xml b/solr/example/example-DIH/solr/solr/conf/solrconfig.xml index 836b1fd14aab..318fbf8bb022 100644 --- a/solr/example/example-DIH/solr/solr/conf/solrconfig.xml +++ b/solr/example/example-DIH/solr/solr/conf/solrconfig.xml @@ -1485,11 +1485,5 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - diff --git a/solr/example/example-DIH/solr/tika/conf/solrconfig.xml b/solr/example/example-DIH/solr/tika/conf/solrconfig.xml index 98b2a8ec2cf9..689429897be3 100644 --- a/solr/example/example-DIH/solr/tika/conf/solrconfig.xml +++ b/solr/example/example-DIH/solr/tika/conf/solrconfig.xml @@ -1463,11 +1463,4 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - - diff --git a/solr/example/files/conf/solrconfig.xml b/solr/example/files/conf/solrconfig.xml index 7e488a895dc9..7aa682a00176 100644 --- a/solr/example/files/conf/solrconfig.xml +++ b/solr/example/files/conf/solrconfig.xml @@ -1505,10 +1505,4 @@ --> - - - - *:* - - diff --git a/solr/server/solr/configsets/basic_configs/conf/solrconfig.xml b/solr/server/solr/configsets/basic_configs/conf/solrconfig.xml index 6e76b306eea2..a4c00b2af6f0 100644 --- a/solr/server/solr/configsets/basic_configs/conf/solrconfig.xml +++ b/solr/server/solr/configsets/basic_configs/conf/solrconfig.xml @@ -559,9 +559,4 @@ - - - *:* - - diff --git a/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml b/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml index 65a3cb8ddd4a..bd58d3634217 100644 --- 
a/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml +++ b/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml @@ -1479,11 +1479,4 @@ EditorialMarkerFactory will do exactly that: --> - - - - - *:* - - diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml index 70dc965d0e5b..be3c16fdbfdd 100644 --- a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml +++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml @@ -1698,10 +1698,4 @@ --> - - - - *:* - - From 51278ff4c038c3baf96868a6fa2258392f45a98c Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 08:40:39 -0500 Subject: [PATCH 0061/1113] LUCENE-7075: remove legacy numericutils usage from test. --- .../lucene/search/join/TestBlockJoin.java | 72 ++++++++++--------- .../java/org/apache/lucene/util/TestUtil.java | 23 ------ 2 files changed, 38 insertions(+), 57 deletions(-) diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index a1987744cae4..b5f2038a0dcf 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -26,8 +26,8 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.StoredField; @@ -52,10 +52,8 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; import 
org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; @@ -73,8 +71,6 @@ import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -93,7 +89,7 @@ private Document makeResume(String name, String country) { private Document makeJob(String skill, int year) { Document job = new Document(); job.add(newStringField("skill", skill, Field.Store.YES)); - job.add(new LegacyIntField("year", year, Field.Store.NO)); + job.add(new IntPoint("year", year)); job.add(new StoredField("year", year)); return job; } @@ -102,7 +98,7 @@ private Document makeJob(String skill, int year) { private Document makeQualification(String qualification, int year) { Document job = new Document(); job.add(newStringField("qualification", qualification, Field.Store.YES)); - job.add(new LegacyIntField("year", year, Field.Store.NO)); + job.add(new IntPoint("year", year)); return job; } @@ -135,7 +131,7 @@ public void testEmptyChildFilter() throws Exception { BooleanQuery.Builder childQuery = new BooleanQuery.Builder(); childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST)); - childQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 2006, 2011, true, true), Occur.MUST)); + childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST)); ToParentBlockJoinQuery childJoinQuery = new ToParentBlockJoinQuery(childQuery.build(), parentsFilter, ScoreMode.Avg); @@ -189,7 +185,7 @@ 
public void testSimple() throws Exception { // Define child document criteria (finds an example of relevant work experience) BooleanQuery.Builder childQuery = new BooleanQuery.Builder(); childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST)); - childQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 2006, 2011, true, true), Occur.MUST)); + childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST)); // Define parent document criteria (find a resident in the UK) Query parentQuery = new TermQuery(new Term("country", "United Kingdom")); @@ -269,23 +265,30 @@ public void testBugCausedByRewritingTwice() throws IOException { w.close(); IndexSearcher s = newSearcher(r); - MultiTermQuery qc = LegacyNumericRangeQuery.newIntRange("year", 2007, 2007, true, true); // Hacky: this causes the query to need 2 rewrite // iterations: - qc.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE); + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(IntPoint.newExactQuery("year", 2007), BooleanClause.Occur.MUST); + Query qc = new Query() { + @Override + public Query rewrite(IndexReader reader) throws IOException { + return builder.build(); + } + + @Override + public String toString(String field) { + return "hack!"; + } + }; BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); CheckJoinIndex.check(r, parentsFilter); - int h1 = qc.hashCode(); Query qw1 = qc.rewrite(r); - int h2 = qw1.hashCode(); Query qw2 = qw1.rewrite(r); - int h3 = qw2.hashCode(); - assertTrue(h1 != h2); - assertTrue(h2 != h3); - assertTrue(h3 != h1); + assertNotSame(qc, qw1); + assertNotSame(qw1, qw2); ToParentBlockJoinQuery qp = new ToParentBlockJoinQuery(qc, parentsFilter, ScoreMode.Max); ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(Sort.RELEVANCE, 10, true, true); @@ -342,7 +345,7 @@ public void testSimpleFilter() throws Exception { // 
Define child document criteria (finds an example of relevant work experience) BooleanQuery.Builder childQuery = new BooleanQuery.Builder(); childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST)); - childQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 2006, 2011, true, true), Occur.MUST)); + childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST)); // Define parent document criteria (find a resident in the UK) Query parentQuery = new TermQuery(new Term("country", "United Kingdom")); @@ -516,7 +519,7 @@ public void testRandom() throws Exception { for(int parentDocID=0;parentDocID joinDocs = new ArrayList<>(); @@ -563,7 +566,7 @@ public void testRandom() throws Exception { Document joinChildDoc = new Document(); joinDocs.add(joinChildDoc); - Field childID = new LegacyIntField("childID", childDocID, Field.Store.YES); + Field childID = new StoredField("childID", childDocID); childDoc.add(childID); joinChildDoc.add(childID); childID = new NumericDocValuesField("childID", childDocID); @@ -596,7 +599,7 @@ public void testRandom() throws Exception { } if (doDeletes) { - joinChildDoc.add(new LegacyIntField("blockID", parentDocID, Field.Store.NO)); + joinChildDoc.add(new IntPoint("blockID", parentDocID)); } w.addDocument(childDoc); @@ -611,14 +614,15 @@ public void testRandom() throws Exception { } } - BytesRefBuilder term = new BytesRefBuilder(); - for(int deleteID : toDelete) { - if (VERBOSE) { - System.out.println("DELETE parentID=" + deleteID); + if (!toDelete.isEmpty()) { + // TODO: we should add newSetQuery(String, Collection) ? this is awkward. 
+ int[] array = new int[toDelete.size()]; + for (int i = 0; i < toDelete.size(); i++) { + array[i] = toDelete.get(i); } - LegacyNumericUtils.intToPrefixCoded(deleteID, 0, term); - w.deleteDocuments(new Term("blockID", term.toBytesRef())); - joinW.deleteDocuments(new Term("blockID", term.toBytesRef())); + Query query = IntPoint.newSetQuery("blockID", array); + w.deleteDocuments(query); + joinW.deleteDocuments(query); } final IndexReader r = w.getReader(); @@ -1061,11 +1065,11 @@ public void testMultiChildTypes() throws Exception { // Define child document criteria (finds an example of relevant work experience) BooleanQuery.Builder childJobQuery = new BooleanQuery.Builder(); childJobQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST)); - childJobQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 2006, 2011, true, true), Occur.MUST)); + childJobQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST)); BooleanQuery.Builder childQualificationQuery = new BooleanQuery.Builder(); childQualificationQuery.add(new BooleanClause(new TermQuery(new Term("qualification", "maths")), Occur.MUST)); - childQualificationQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 1980, 2000, true, true), Occur.MUST)); + childQualificationQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 1980, 2000), Occur.MUST)); // Define parent document criteria (find a resident in the UK) @@ -1210,7 +1214,7 @@ public void testGetTopGroups() throws Exception { // Define child document criteria (finds an example of relevant work experience) BooleanQuery.Builder childQuery = new BooleanQuery.Builder(); childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST)); - childQuery.add(new BooleanClause(LegacyNumericRangeQuery.newIntRange("year", 2006, 2011, true, true), Occur.MUST)); + childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST)); // Wrap 
the child document query to 'join' any matches // up to corresponding parent: @@ -1707,7 +1711,7 @@ public void testMultiChildQueriesOfDiffParentLevels() throws Exception { Query resumeQuery = new ToChildBlockJoinQuery(new TermQuery(new Term("country","rv" + qrv)), resumeFilter); - Query jobQuery = new ToChildBlockJoinQuery(LegacyNumericRangeQuery.newIntRange("year", qjv, qjv, true, true), + Query jobQuery = new ToChildBlockJoinQuery(IntPoint.newRangeQuery("year", qjv, qjv), jobFilter); BooleanQuery.Builder fullQuery = new BooleanQuery.Builder(); diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java index 50692e81322a..5e328ba13661 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java @@ -61,11 +61,6 @@ import org.apache.lucene.document.BinaryPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType.LegacyNumericType; -import org.apache.lucene.document.LegacyDoubleField; -import org.apache.lucene.document.LegacyFloatField; -import org.apache.lucene.document.LegacyIntField; -import org.apache.lucene.document.LegacyLongField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.index.CheckIndex; @@ -1072,7 +1067,6 @@ public static Document cloneDocument(Document doc1) { final Field field2; final DocValuesType dvType = field1.fieldType().docValuesType(); final int dimCount = field1.fieldType().pointDimensionCount(); - final LegacyNumericType numType = field1.fieldType().numericType(); if (dvType != DocValuesType.NONE) { switch(dvType) { case NUMERIC: @@ -1092,23 +1086,6 @@ public static Document cloneDocument(Document doc1) { byte[] bytes = new byte[br.length]; System.arraycopy(br.bytes, br.offset, bytes, 0, 
br.length); field2 = new BinaryPoint(field1.name(), bytes, field1.fieldType()); - } else if (numType != null) { - switch (numType) { - case INT: - field2 = new LegacyIntField(field1.name(), field1.numericValue().intValue(), field1.fieldType()); - break; - case FLOAT: - field2 = new LegacyFloatField(field1.name(), field1.numericValue().intValue(), field1.fieldType()); - break; - case LONG: - field2 = new LegacyLongField(field1.name(), field1.numericValue().intValue(), field1.fieldType()); - break; - case DOUBLE: - field2 = new LegacyDoubleField(field1.name(), field1.numericValue().intValue(), field1.fieldType()); - break; - default: - throw new IllegalStateException("unknown Type: " + numType); - } } else { field2 = new Field(field1.name(), field1.stringValue(), field1.fieldType()); } From 981ad999d075266423b01ece3be31ada3f89913b Mon Sep 17 00:00:00 2001 From: Varun Thacker Date: Tue, 8 Mar 2016 19:11:00 +0530 Subject: [PATCH 0062/1113] SOLR-8766: Remove support for admin/gettableFiles as well --- solr/CHANGES.txt | 3 +- .../java/org/apache/solr/core/SolrCore.java | 51 ------------------- 2 files changed, 2 insertions(+), 52 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index fa43ecec71cf..a002efa8282e 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -415,7 +415,8 @@ Other Changes * SOLR-8736: schema GET operations on fields, dynamicFields, fieldTypes, copyField are reimplemented as a part of the bulk API with less details (noble) -* SOLR-8766 : deprecated tag in solrconfig.xml is removed (noble) +* SOLR-8766: Remove deprecated tag in solrconfig.xml and support for admin/gettableFiles + (noble, Jason Gerlowski, Varun Thacker) ================== 5.5.1 ================== diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index f4c40f842f7d..cde878a5ebf3 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ 
b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -726,9 +726,6 @@ public SolrCore(String name, String dataDir, SolrConfig config, reqHandlers = new RequestHandlers(this); reqHandlers.initHandlersFromConfig(solrConfig); - // Handle things that should eventually go away - initDeprecatedSupport(); - statsCache = initStatsCache(); // cause the executor to stall so firstSearcher events won't fire @@ -2269,54 +2266,6 @@ public ValueSourceParser getValueSourceParser(String parserName) { return valueSourceParsers.get(parserName); } - /** - * Manage anything that should be taken care of in case configs change - */ - private void initDeprecatedSupport() - { - // TODO -- this should be removed in deprecation release... - String gettable = solrConfig.get("admin/gettableFiles", null ); - if( gettable != null ) { - log.warn( - "solrconfig.xml uses deprecated , Please "+ - "update your config to use the ShowFileRequestHandler." ); - if( getRequestHandler( "/admin/file" ) == null ) { - NamedList invariants = new NamedList<>(); - - // Hide everything... - Set hide = new HashSet<>(); - - for (String file : solrConfig.getResourceLoader().listConfigDir()) { - hide.add(file.toUpperCase(Locale.ROOT)); - } - - // except the "gettable" list - StringTokenizer st = new StringTokenizer( gettable ); - while( st.hasMoreTokens() ) { - hide.remove( st.nextToken().toUpperCase(Locale.ROOT) ); - } - for( String s : hide ) { - invariants.add( ShowFileRequestHandler.HIDDEN, s ); - } - - NamedList args = new NamedList<>(); - args.add( "invariants", invariants ); - ShowFileRequestHandler handler = new ShowFileRequestHandler(); - handler.init( args ); - reqHandlers.register("/admin/file", handler); - - log.warn( "adding ShowFileRequestHandler with hidden files: "+hide ); - } - } - - String facetSort = solrConfig.get("//bool[@name='facet.sort']", null); - if (facetSort != null) { - log.warn( - "solrconfig.xml uses deprecated . 
Please "+ - "update your config to use ."); - } - } - /** * Creates and initializes a RestManager based on configuration args in solrconfig.xml. * RestManager provides basic storage support for managed resource data, such as to From b6ffd27bf33adbb3ef92a41a36581c6f101535f2 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 10:54:29 -0500 Subject: [PATCH 0063/1113] LUCENE-7075: clean up LegacyNumeric* in .document javadocs --- .../org/apache/lucene/document/Document.java | 6 ++---- .../org/apache/lucene/document/Field.java | 19 ++++++++++++++----- .../apache/lucene/document/TestDocument.java | 4 ++-- 3 files changed, 18 insertions(+), 11 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/document/Document.java b/lucene/core/src/java/org/apache/lucene/document/Document.java index cdba083942fc..2f44444babeb 100644 --- a/lucene/core/src/java/org/apache/lucene/document/Document.java +++ b/lucene/core/src/java/org/apache/lucene/document/Document.java @@ -199,8 +199,7 @@ public final List getFields() { * Returns an array of values of the field specified as the method parameter. * This method returns an empty array when there are no * matching fields. It never returns null. - * For {@link LegacyIntField}, {@link LegacyLongField}, {@link - * LegacyFloatField} and {@link LegacyDoubleField} it returns the string value of the number. If you want + * For a numeric {@link StoredField} it returns the string value of the number. If you want * the actual numeric field instances back, use {@link #getFields}. * @param name the name of the field * @return a String[] of field values @@ -224,8 +223,7 @@ public final String[] getValues(String name) { * this document, or null. If multiple fields exist with this name, this * method returns the first value added. If only binary fields with this name * exist, returns null. 
- * For {@link LegacyIntField}, {@link LegacyLongField}, {@link - * LegacyFloatField} and {@link LegacyDoubleField} it returns the string value of the number. If you want + * For a numeric {@link StoredField} it returns the string value of the number. If you want * the actual numeric field instance back, use {@link #getField}. */ public final String get(String name) { diff --git a/lucene/core/src/java/org/apache/lucene/document/Field.java b/lucene/core/src/java/org/apache/lucene/document/Field.java index dff2e58aa6ef..550d1fd1797f 100644 --- a/lucene/core/src/java/org/apache/lucene/document/Field.java +++ b/lucene/core/src/java/org/apache/lucene/document/Field.java @@ -33,11 +33,20 @@ /** * Expert: directly create a field for a document. Most - * users should use one of the sugar subclasses: {@link - * LegacyIntField}, {@link LegacyLongField}, {@link LegacyFloatField}, {@link - * LegacyDoubleField}, {@link BinaryDocValuesField}, {@link - * NumericDocValuesField}, {@link SortedDocValuesField}, {@link - * StringField}, {@link TextField}, {@link StoredField}. + * users should use one of the sugar subclasses: + *
      + *
    • {@link TextField}: {@link Reader} or {@link String} indexed for full-text search + *
    • {@link StringField}: {@link String} indexed verbatim as a single token + *
    • {@link IntPoint}: {@code int} indexed for exact/range queries. + *
    • {@link LongPoint}: {@code long} indexed for exact/range queries. + *
    • {@link FloatPoint}: {@code float} indexed for exact/range queries. + *
    • {@link DoublePoint}: {@code double} indexed for exact/range queries. + *
    • {@link SortedDocValuesField}: {@code byte[]} indexed column-wise for sorting/faceting + *
    • {@link SortedSetDocValuesField}: {@code SortedSet} indexed column-wise for sorting/faceting + *
    • {@link NumericDocValuesField}: {@code long} indexed column-wise for sorting/faceting + *
    • {@link SortedNumericDocValuesField}: {@code SortedSet} indexed column-wise for sorting/faceting + *
    • {@link StoredField}: Stored-only value for retrieving in summary results + *
    * *

    A field is a section of a Document. Each field has three * parts: name, type and value. Values may be text diff --git a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java index bd873ec8a9cd..50c1ed0a919a 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java @@ -340,10 +340,10 @@ public void testInvalidFields() { public void testNumericFieldAsString() throws Exception { Document doc = new Document(); - doc.add(new LegacyIntField("int", 5, Field.Store.YES)); + doc.add(new StoredField("int", 5)); assertEquals("5", doc.get("int")); assertNull(doc.get("somethingElse")); - doc.add(new LegacyIntField("int", 4, Field.Store.YES)); + doc.add(new StoredField("int", 4)); assertArrayEquals(new String[] { "5", "4" }, doc.getValues("int")); Directory dir = newDirectory(); From f297e900c0d0e15d2b5f9a7bd7dbe1978315902b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Mar 2016 11:17:12 -0500 Subject: [PATCH 0064/1113] LUCENE-7075: clean up LegacyNumerics* usage in queries/ tests --- .../queries/function/FunctionTestSetup.java | 8 +++----- .../queries/function/TestFunctionQuerySort.java | 4 ++-- .../queries/function/TestValueSources.java | 16 ---------------- 3 files changed, 5 insertions(+), 23 deletions(-) diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java b/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java index d5a587df61ee..2764a8fc5b4a 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java @@ -21,12 +21,10 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.LegacyFloatField; 
-import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.document.TextField; -import org.apache.lucene.document.Field.Store; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.queries.function.valuesource.FloatFieldSource; @@ -143,11 +141,11 @@ private static void addDoc(RandomIndexWriter iw, int i) throws Exception { f = newField(TEXT_FIELD, "text of doc" + scoreAndID + textLine(i), customType2); // for regular search d.add(f); - f = new LegacyIntField(INT_FIELD, scoreAndID, Store.YES); // for function scoring + f = new StoredField(INT_FIELD, scoreAndID); // for function scoring d.add(f); d.add(new NumericDocValuesField(INT_FIELD, scoreAndID)); - f = new LegacyFloatField(FLOAT_FIELD, scoreAndID, Store.YES); // for function scoring + f = new StoredField(FLOAT_FIELD, scoreAndID); // for function scoring d.add(f); d.add(new NumericDocValuesField(FLOAT_FIELD, Float.floatToRawIntBits(scoreAndID))); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java index b9e1eb2cade2..67f67b2dc573 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java @@ -20,8 +20,8 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.RandomIndexWriter; @@ -102,7 +102,7 @@ 
public void testSearchAfterWhenSortingByFunctionValues() throws IOException { RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); - Field field = new LegacyIntField("value", 0, Field.Store.YES); + Field field = new StoredField("value", 0); Field dvField = new NumericDocValuesField("value", 0); doc.add(field); doc.add(dvField); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java index 37a32da5db65..509e0ab3cb2f 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java @@ -24,11 +24,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; -import org.apache.lucene.document.LegacyDoubleField; import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyFloatField; -import org.apache.lucene.document.LegacyIntField; -import org.apache.lucene.document.LegacyLongField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.StringField; @@ -120,20 +116,12 @@ public static void beforeClass() throws Exception { document.add(idField); Field idDVField = new SortedDocValuesField("id", new BytesRef()); document.add(idDVField); - Field doubleField = new LegacyDoubleField("double", 0d, Field.Store.NO); - document.add(doubleField); Field doubleDVField = new NumericDocValuesField("double", 0); document.add(doubleDVField); - Field floatField = new LegacyFloatField("float", 0f, Field.Store.NO); - document.add(floatField); Field floatDVField = new NumericDocValuesField("float", 0); document.add(floatDVField); - Field intField = new LegacyIntField("int", 0, Field.Store.NO); - document.add(intField); 
Field intDVField = new NumericDocValuesField("int", 0); document.add(intDVField); - Field longField = new LegacyLongField("long", 0L, Field.Store.NO); - document.add(longField); Field longDVField = new NumericDocValuesField("long", 0); document.add(longDVField); Field stringField = new StringField("string", "", Field.Store.NO); @@ -146,13 +134,9 @@ public static void beforeClass() throws Exception { for (String [] doc : documents) { idField.setStringValue(doc[0]); idDVField.setBytesValue(new BytesRef(doc[0])); - doubleField.setDoubleValue(Double.valueOf(doc[1])); doubleDVField.setLongValue(Double.doubleToRawLongBits(Double.valueOf(doc[1]))); - floatField.setFloatValue(Float.valueOf(doc[2])); floatDVField.setLongValue(Float.floatToRawIntBits(Float.valueOf(doc[2]))); - intField.setIntValue(Integer.valueOf(doc[3])); intDVField.setLongValue(Integer.valueOf(doc[3])); - longField.setLongValue(Long.valueOf(doc[4])); longDVField.setLongValue(Long.valueOf(doc[4])); stringField.setStringValue(doc[5]); stringDVField.setBytesValue(new BytesRef(doc[5])); From ae59bc0785b57ff5ce8cc4f88cd7728de3735e18 Mon Sep 17 00:00:00 2001 From: David Smiley Date: Tue, 8 Mar 2016 12:36:46 -0500 Subject: [PATCH 0065/1113] LUCENE-6952: Make most Filter* classes abstract. 
(cherry picked from commit 9393a31) --- lucene/CHANGES.txt | 4 ++++ .../org/apache/lucene/index/FilterCodecReader.java | 2 +- .../java/org/apache/lucene/index/FilterLeafReader.java | 10 +++++----- .../java/org/apache/lucene/search/FilterCollector.java | 2 +- .../org/apache/lucene/search/FilterLeafCollector.java | 2 +- .../java/org/apache/lucene/store/FilterDirectory.java | 2 +- .../org/apache/lucene/index/TestFilterLeafReader.java | 2 +- .../org/apache/lucene/store/TestFilterDirectory.java | 4 ++-- .../org/apache/lucene/index/MockRandomMergePolicy.java | 2 +- .../org/apache/lucene/mockfile/FilterFileChannel.java | 2 +- .../org/apache/lucene/mockfile/FilterFileStore.java | 2 +- .../org/apache/lucene/mockfile/FilterFileSystem.java | 2 +- .../lucene/mockfile/FilterFileSystemProvider.java | 2 +- .../apache/lucene/mockfile/FilterOutputStream2.java | 2 +- 14 files changed, 22 insertions(+), 18 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index ca59e6b8f1e1..3abd4fb5e80a 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -128,6 +128,10 @@ API Changes * LUCENE-7056: Geo3D classes are in different packages now. (David Smiley) +* LUCENE-6952: These classes are now abstract: FilterCodecReader, FilterLeafReader, + FilterCollector, FilterDirectory. And some Filter* classes in + lucene-test-framework too. (David Smiley) + Optimizations * LUCENE-6891: Use prefix coding when writing points in diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java index 41f09843aed7..c35dc6719c91 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java @@ -32,7 +32,7 @@ * uses as its basic source of data, possibly transforming the data along the * way or providing additional functionality. 
*/ -public class FilterCodecReader extends CodecReader { +public abstract class FilterCodecReader extends CodecReader { /** * The underlying CodecReader instance. */ diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java index 98365a6cf658..1d593c32a127 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java @@ -44,7 +44,7 @@ * overridden as well if the {@link #getLiveDocs() live docs} are not changed * either. */ -public class FilterLeafReader extends LeafReader { +public abstract class FilterLeafReader extends LeafReader { /** Get the wrapped instance by reader as long as this reader is * an instance of {@link FilterLeafReader}. */ @@ -57,7 +57,7 @@ public static LeafReader unwrap(LeafReader reader) { /** Base class for filtering {@link Fields} * implementations. */ - public static class FilterFields extends Fields { + public abstract static class FilterFields extends Fields { /** The underlying Fields instance. */ protected final Fields in; @@ -93,7 +93,7 @@ public int size() { * these terms are going to be intersected with automata, you could consider * overriding {@link #intersect} for better performance. */ - public static class FilterTerms extends Terms { + public abstract static class FilterTerms extends Terms { /** The underlying Terms instance. */ protected final Terms in; @@ -160,7 +160,7 @@ public Object getStats() throws IOException { } /** Base class for filtering {@link TermsEnum} implementations. */ - public static class FilterTermsEnum extends TermsEnum { + public abstract static class FilterTermsEnum extends TermsEnum { /** The underlying TermsEnum instance. */ protected final TermsEnum in; @@ -223,7 +223,7 @@ public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { } /** Base class for filtering {@link PostingsEnum} implementations. 
*/ - public static class FilterPostingsEnum extends PostingsEnum { + public abstract static class FilterPostingsEnum extends PostingsEnum { /** The underlying PostingsEnum instance. */ protected final PostingsEnum in; diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterCollector.java b/lucene/core/src/java/org/apache/lucene/search/FilterCollector.java index d290330f5d69..d4ec91492675 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FilterCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/FilterCollector.java @@ -26,7 +26,7 @@ * * @lucene.experimental */ -public class FilterCollector implements Collector { +public abstract class FilterCollector implements Collector { protected final Collector in; diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java b/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java index ab15babad0be..b55410c87c72 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java @@ -24,7 +24,7 @@ * * @lucene.experimental */ -public class FilterLeafCollector implements LeafCollector { +public abstract class FilterLeafCollector implements LeafCollector { protected final LeafCollector in; diff --git a/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java b/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java index 5df571358aec..8148b5ac3a38 100644 --- a/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java +++ b/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java @@ -29,7 +29,7 @@ * {@link Directory} or {@link BaseDirectory} rather than try to reuse * functionality of existing {@link Directory}s by extending this class. 
* @lucene.internal */ -public class FilterDirectory extends Directory { +public abstract class FilterDirectory extends Directory { /** Get the wrapped instance by dir as long as this reader is * an instance of {@link FilterDirectory}. */ diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java index 82fb3bce2f67..cad47a4e1268 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java @@ -196,7 +196,7 @@ public void testUnwrap() throws IOException { w.addDocument(new Document()); DirectoryReader dr = w.getReader(); LeafReader r = dr.leaves().get(0).reader(); - FilterLeafReader r2 = new FilterLeafReader(r); + FilterLeafReader r2 = new FilterLeafReader(r) {}; assertEquals(r, r2.getDelegate()); assertEquals(r, FilterLeafReader.unwrap(r2)); w.close(); diff --git a/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java index 7fe9bc2be0d4..6224140e3ca4 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java +++ b/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java @@ -29,7 +29,7 @@ public class TestFilterDirectory extends BaseDirectoryTestCase { @Override protected Directory getDirectory(Path path) throws IOException { - return new FilterDirectory(new RAMDirectory()); + return new FilterDirectory(new RAMDirectory()) {}; } @Test @@ -48,7 +48,7 @@ public void testOverrides() throws Exception { public void testUnwrap() throws IOException { Directory dir = FSDirectory.open(createTempDir()); - FilterDirectory dir2 = new FilterDirectory(dir); + FilterDirectory dir2 = new FilterDirectory(dir) {}; assertEquals(dir, dir2.getDelegate()); assertEquals(dir, FilterDirectory.unwrap(dir2)); dir2.close(); diff --git 
a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java index bcee1b6c4e18..b40ac2685d51 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java @@ -159,7 +159,7 @@ public List getMergeReaders() throws IOException { if (LuceneTestCase.VERBOSE) { System.out.println("NOTE: MockRandomMergePolicy now swaps in a SlowCodecReaderWrapper for merging reader=" + readers.get(i)); } - readers.set(i, SlowCodecReaderWrapper.wrap(new FilterLeafReader(readers.get(i)))); + readers.set(i, SlowCodecReaderWrapper.wrap(new FilterLeafReader(readers.get(i)) {})); } else if (thingToDo == 1) { // renumber fields // NOTE: currently this only "blocks" bulk merges just by diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileChannel.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileChannel.java index 0c95af0ee637..ccc6e7abc9eb 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileChannel.java +++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileChannel.java @@ -31,7 +31,7 @@ * source of data, possibly transforming the data along the * way or providing additional functionality. */ -public class FilterFileChannel extends FileChannel { +public abstract class FilterFileChannel extends FileChannel { /** * The underlying {@code FileChannel} instance. 
diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileStore.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileStore.java index dc90799d6ece..423b32d75ba1 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileStore.java +++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileStore.java @@ -28,7 +28,7 @@ * source of data, possibly transforming the data along the * way or providing additional functionality. */ -public class FilterFileStore extends FileStore { +public abstract class FilterFileStore extends FileStore { /** * The underlying {@code FileStore} instance. diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystem.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystem.java index d79ed35e7f50..e24506d7cd52 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystem.java +++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystem.java @@ -131,7 +131,7 @@ public boolean hasNext() { @Override public FileStore next() { - return new FilterFileStore(iterator.next(), parent.getScheme()); + return new FilterFileStore(iterator.next(), parent.getScheme()) {}; } @Override diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystemProvider.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystemProvider.java index c9c016551c85..8a7ff754e014 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystemProvider.java +++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterFileSystemProvider.java @@ -48,7 +48,7 @@ * source of data, possibly transforming the data along the * way or providing additional functionality. 
*/ -public class FilterFileSystemProvider extends FileSystemProvider { +public abstract class FilterFileSystemProvider extends FileSystemProvider { /** * The underlying {@code FileSystemProvider}. diff --git a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterOutputStream2.java b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterOutputStream2.java index dbf7a95bb718..5413c87dd710 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterOutputStream2.java +++ b/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterOutputStream2.java @@ -34,7 +34,7 @@ * that just overrides {@code close} will not force bytes to be * written one-at-a-time. */ -public class FilterOutputStream2 extends OutputStream { +public abstract class FilterOutputStream2 extends OutputStream { /** * The underlying {@code OutputStream} instance. From 0ab23fd1f5e38cdd4fe742f70f66d97e666b9e80 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Tue, 8 Mar 2016 13:28:28 -0500 Subject: [PATCH 0066/1113] LUCENE-7083: default points merge logic should not ask a reader to merge points on a field that doesn't exist in that segment --- .../apache/lucene/codecs/PointsWriter.java | 6 ++++++ .../org/apache/lucene/index/IndexWriter.java | 1 + .../apache/lucene/index/TestPointValues.java | 20 +++++++++++++++++++ 3 files changed, 27 insertions(+) diff --git a/lucene/core/src/java/org/apache/lucene/codecs/PointsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/PointsWriter.java index 56689ecae7c7..53db281777af 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/PointsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/PointsWriter.java @@ -54,6 +54,12 @@ public void intersect(String fieldName, IntersectVisitor mergedVisitor) throws I // This segment has no points continue; } + FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(fieldName); + if (readerFieldInfo == null) { + // This segment never saw this field + 
continue; + } + MergeState.DocMap docMap = mergeState.docMaps[i]; int docBase = mergeState.docBase[i]; pointsReader.intersect(fieldInfo.name, diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index 57ce3ddea6d1..66a0e73b601b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -4116,6 +4116,7 @@ private int mergeMiddle(MergePolicy.OneMerge merge, MergePolicy mergePolicy) thr (mergeState.mergeFieldInfos.hasDocValues() ? "docValues" : "no docValues") + "; " + (mergeState.mergeFieldInfos.hasProx() ? "prox" : "no prox") + "; " + (mergeState.mergeFieldInfos.hasProx() ? "freqs" : "no freqs") + "; " + + (mergeState.mergeFieldInfos.hasPointValues() ? "points" : "no points") + "; " + String.format(Locale.ROOT, "%.1f sec (%.1f sec stopped, %.1f sec paused) to merge segment [%.2f MB, %.2f MB/sec]", sec, diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index 7231b1afc6fa..9ced11a07187 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.PointValues; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -562,4 +563,23 @@ public void testDeleteAllPointDocs() throws Exception { r.close(); dir.close(); } + + public void testPointsFieldMissingFromOneSegment() throws Exception { + Directory dir = FSDirectory.open(createTempDir()); + IndexWriterConfig iwc = new IndexWriterConfig(null); + IndexWriter w = new IndexWriter(dir, iwc); + Document doc = new 
Document(); + doc.add(new StringField("id", "0", Field.Store.NO)); + doc.add(new IntPoint("int0", 0)); + w.addDocument(doc); + w.commit(); + + doc = new Document(); + doc.add(new IntPoint("int1", 17)); + w.addDocument(doc); + w.forceMerge(1); + + w.close(); + dir.close(); + } } From 7c527cc346c0863c4b0289fc23d8870290b41233 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Tue, 8 Mar 2016 13:34:08 -0500 Subject: [PATCH 0067/1113] don't use slow composite reader in this test --- .../lucene/index/ThreadedIndexingAndSearchingTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java index a3e049de3151..80c3903e4828 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java @@ -480,7 +480,7 @@ public void warm(LeafReader reader) throws IOException { } } - IndexSearcher searcher = newSearcher(reader); + IndexSearcher searcher = newSearcher(reader, false); sum += searcher.search(new TermQuery(new Term("body", "united")), 10).totalHits; if (VERBOSE) { From 7c69f272337fb16d7aea0995aed48d19ff62b1ec Mon Sep 17 00:00:00 2001 From: David Smiley Date: Tue, 8 Mar 2016 14:36:04 -0500 Subject: [PATCH 0068/1113] SOLR-6926: fix smokeTestRelease.py to stop calling ant example. 
(cherry picked from commit e490b32) --- dev-tools/scripts/smokeTestRelease.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py index aa367f6d5313..c5b8d6e2f3cd 100644 --- a/dev-tools/scripts/smokeTestRelease.py +++ b/dev-tools/scripts/smokeTestRelease.py @@ -700,7 +700,7 @@ def verifyUnpacked(java, project, artifact, unpackPath, gitRevision, version, te checkJavadocpathFull('%s/solr/build/docs' % unpackPath, False) print(' test solr example w/ Java 8...') - java.run_java8('ant clean example', '%s/antexample.log' % unpackPath) + java.run_java8('ant clean server', '%s/antexample.log' % unpackPath) testSolrExample(unpackPath, java.java8_home, True) os.chdir('..') @@ -782,6 +782,7 @@ def readSolrOutput(p, startupEvent, failureEvent, logFile): f.close() def testSolrExample(unpackPath, javaPath, isSrc): + # test solr using some examples it comes with logFile = '%s/solr-example.log' % unpackPath if isSrc: os.chdir(unpackPath+'/solr') From 122c24807196205f2bf65d37e6565caa7c6be987 Mon Sep 17 00:00:00 2001 From: Shai Erera Date: Tue, 8 Mar 2016 22:11:18 +0200 Subject: [PATCH 0069/1113] SOLR-8793: Fix stale commit files' size computation in LukeRequestHandler --- solr/CHANGES.txt | 5 +++++ .../solr/handler/admin/LukeRequestHandler.java | 12 +++++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index a002efa8282e..818cf72c235a 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -433,6 +433,11 @@ Bug Fixes * SOLR-8712: Variable solr.core.instanceDir was not being resolved (Kristine Jetzke, Shawn Heisey, Alan Woodward) +* SOLR-8793: Fix Core admin status API to not fail when computing the size of the segments_N + file if the file no longer exists (for example, if a commit happened and the IndexReader + hasn't refreshed yet). In this case the reported size of the file is -1. 
+ (Shai Erera, Alexey Serba, Richard Coggins) + ======================= 5.5.0 ======================= Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java index 0ec6d79fecbb..450a5052d1a3 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java @@ -582,7 +582,7 @@ public static SimpleOrderedMap getIndexInfo(DirectoryReader reader) thro IndexCommit indexCommit = reader.getIndexCommit(); String segmentsFileName = indexCommit.getSegmentsFileName(); indexInfo.add("segmentsFile", segmentsFileName); - indexInfo.add("segmentsFileSizeInBytes", indexCommit.getDirectory().fileLength(segmentsFileName)); + indexInfo.add("segmentsFileSizeInBytes", getFileLength(indexCommit.getDirectory(), segmentsFileName)); Map userData = indexCommit.getUserData(); indexInfo.add("userData", userData); String s = userData.get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY); @@ -592,6 +592,16 @@ public static SimpleOrderedMap getIndexInfo(DirectoryReader reader) thro return indexInfo; } + private static long getFileLength(Directory dir, String filename) { + try { + return dir.fileLength(filename); + } catch (IOException e) { + // Whatever the error is, only log it and return -1. 
+ log.warn("Error getting file length for [{}]", filename, e); + return -1; + } + } + /** Returns the sum of RAM bytes used by each segment */ private static long getIndexHeapUsed(DirectoryReader reader) { long indexHeapRamBytesUsed = 0; From 6eea1f94c468d060bbac4f23de4e1b9d64c691f1 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Tue, 8 Mar 2016 15:21:37 -0500 Subject: [PATCH 0070/1113] improve testing for sparse points --- .../index/TestBackwardsCompatibility.java | 2 +- .../index/TestFlushByRamOrCountsPolicy.java | 2 +- .../lucene/index/TestForceMergeForever.java | 2 +- .../org/apache/lucene/index/TestNorms.java | 2 +- .../apache/lucene/index/TestPointValues.java | 46 +++++++++++++++ .../lucene/index/TestRollingUpdates.java | 2 +- .../apache/lucene/index/TestTermsEnum.java | 2 +- .../lucene/store/TestNRTCachingDirectory.java | 2 +- .../org/apache/lucene/util/fst/TestFSTs.java | 2 +- .../analyzing/TestFreeTextSuggester.java | 2 +- .../ThreadedIndexingAndSearchingTestCase.java | 2 +- .../lucene/search/ShardSearchingTestBase.java | 2 +- .../org/apache/lucene/util/LineFileDocs.java | 58 ++++++++++++------- 13 files changed, 95 insertions(+), 31 deletions(-) diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java index a2a749187913..6121bca6e5c9 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java @@ -148,7 +148,7 @@ public void testCreateMoreTermsIndex() throws Exception { IndexWriterConfig conf = new IndexWriterConfig(analyzer) .setMergePolicy(mp).setUseCompoundFile(false); IndexWriter writer = new IndexWriter(dir, conf); - LineFileDocs docs = new LineFileDocs(null, true); + LineFileDocs docs = new LineFileDocs(null); for(int i=0;i<50;i++) { writer.addDocument(docs.nextDoc()); } diff --git 
a/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java index 562913e079ac..993a521a36e3 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestFlushByRamOrCountsPolicy.java @@ -39,7 +39,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase { @BeforeClass public static void beforeClass() throws Exception { - lineDocFile = new LineFileDocs(random(), true); + lineDocFile = new LineFileDocs(random()); } @AfterClass diff --git a/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java b/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java index 3edeef15f618..037939534f17 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestForceMergeForever.java @@ -62,7 +62,7 @@ public void test() throws Exception { // Try to make an index that requires merging: w.getConfig().setMaxBufferedDocs(TestUtil.nextInt(random(), 2, 11)); final int numStartDocs = atLeast(20); - final LineFileDocs docs = new LineFileDocs(random(), true); + final LineFileDocs docs = new LineFileDocs(random()); for(int docIDX=0;docIDX Date: Tue, 8 Mar 2016 15:21:19 -0500 Subject: [PATCH 0071/1113] SOLR-8799: Improve error message when tuple can't be read by SolrJ JDBC --- .../solr/client/solrj/io/sql/ResultSetImpl.java | 2 +- .../apache/solr/client/solrj/io/sql/JdbcTest.java | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java index e2f8cf04f4be..0aa3a4bc5173 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java +++ 
b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/ResultSetImpl.java @@ -78,7 +78,7 @@ class ResultSetImpl implements ResultSet { this.firstTuple = this.solrStream.read(); this.solrStream.pushBack(firstTuple); } catch (IOException e) { - throw new SQLException("Couldn't read first tuple", e); + throw new SQLException(e); } this.resultSetMetaData = new ResultSetMetaDataImpl(this); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java index e1e9739511f3..572491e31e8f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java @@ -396,6 +396,21 @@ public void doTest() throws Exception { } } + + //Test error propagation + props = new Properties(); + props.put("aggregationMode", "facet"); + try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) { + try (Statement stmt = con.createStatement()) { + try (ResultSet rs = stmt.executeQuery("select crap from collection1 group by a_s " + + "order by sum(a_f) desc")) { + } catch (Exception e) { + String errorMessage = e.getMessage(); + assertTrue(errorMessage.contains("Group by queries must include atleast one aggregate function")); + } + } + } + testDriverMetadata(); } From d0427d8617565cb4782faf1c93cf0ac0a6207680 Mon Sep 17 00:00:00 2001 From: jbernste Date: Tue, 8 Mar 2016 15:25:52 -0500 Subject: [PATCH 0072/1113] SOLR-8799: Update CHANGES.txt --- solr/CHANGES.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 818cf72c235a..5ee5b979aa03 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -418,6 +418,8 @@ Other Changes * SOLR-8766: Remove deprecated tag in solrconfig.xml and support for admin/gettableFiles (noble, Jason Gerlowski, Varun Thacker) +* SOLR-8799: Improve error message when tuple can't be read by SolrJ 
JDBC (Kevin Risden, Joel Bernstein) + ================== 5.5.1 ================== Bug Fixes From acbabac7b574c792daccddfecf29f1adb3da1ec5 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Tue, 8 Mar 2016 17:21:12 -0500 Subject: [PATCH 0073/1113] fix random float test to do the +/- 1 ulp in float space --- .../lucene/facet/range/TestRangeFacetCounts.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java index 9fde6e3ed9f1..9f8b1096ac90 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java @@ -532,6 +532,8 @@ public void testRandomFloats() throws Exception { int[] expectedCounts = new int[numRange]; float minAcceptedValue = Float.POSITIVE_INFINITY; float maxAcceptedValue = Float.NEGATIVE_INFINITY; + boolean[] rangeMinIncl = new boolean[numRange]; + boolean[] rangeMaxIncl = new boolean[numRange]; if (VERBOSE) { System.out.println("TEST: " + numRange + " ranges"); } @@ -582,6 +584,8 @@ public void testRandomFloats() throws Exception { minIncl = random().nextBoolean(); maxIncl = random().nextBoolean(); } + rangeMinIncl[rangeID] = minIncl; + rangeMaxIncl[rangeID] = maxIncl; ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl); if (VERBOSE) { @@ -642,7 +646,17 @@ public void testRandomFloats() throws Exception { // Test drill-down: DrillDownQuery ddq = new DrillDownQuery(config); if (random().nextBoolean()) { - ddq.add("field", FloatPoint.newRangeQuery("field", (float) range.min, (float) range.max)); + // We must do the nextUp/down in float space, here, because the nextUp that DoubleRange did in double space, when cast back to float, + // in fact does nothing! 
+ float minFloat = (float) range.min; + if (rangeMinIncl[rangeID] == false) { + minFloat = Math.nextUp(minFloat); + } + float maxFloat = (float) range.max; + if (rangeMaxIncl[rangeID] == false) { + maxFloat = Math.nextAfter(maxFloat, Float.NEGATIVE_INFINITY); + } + ddq.add("field", FloatPoint.newRangeQuery("field", minFloat, maxFloat)); } else { ddq.add("field", range.getQuery(fastMatchQuery, vs)); } From 885ad49105ca98cdf2d530005433902ea4e24ffb Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Tue, 8 Mar 2016 17:30:30 -0500 Subject: [PATCH 0074/1113] remove troublesome float tests since facets only actually expose doubles --- .../facet/range/TestRangeFacetCounts.java | 203 ------------------ 1 file changed, 203 deletions(-) diff --git a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java index 9f8b1096ac90..626d772d6a9e 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java @@ -23,11 +23,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import org.apache.lucene.document.DoublePoint; -import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleDocValuesField; -import org.apache.lucene.document.FloatDocValuesField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.facet.DrillDownQuery; import org.apache.lucene.facet.DrillSideways; @@ -52,7 +50,6 @@ import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.apache.lucene.queries.function.valuesource.DoubleFieldSource; -import org.apache.lucene.queries.function.valuesource.FloatFieldSource; import org.apache.lucene.queries.function.valuesource.LongFieldSource; import 
org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -321,37 +318,6 @@ public void testBasicDouble() throws Exception { IOUtils.close(r, d); } - public void testBasicFloat() throws Exception { - Directory d = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), d); - Document doc = new Document(); - FloatDocValuesField field = new FloatDocValuesField("field", 0.0f); - doc.add(field); - for(long l=0;l<100;l++) { - field.setFloatValue(l); - w.addDocument(doc); - } - - IndexReader r = w.getReader(); - - FacetsCollector fc = new FacetsCollector(); - - IndexSearcher s = newSearcher(r); - s.search(new MatchAllDocsQuery(), fc); - - Facets facets = new DoubleRangeFacetCounts("field", new FloatFieldSource("field"), fc, - new DoubleRange("less than 10", 0.0f, true, 10.0f, false), - new DoubleRange("less than or equal to 10", 0.0f, true, 10.0f, true), - new DoubleRange("over 90", 90.0f, false, 100.0f, false), - new DoubleRange("90 or above", 90.0f, true, 100.0f, false), - new DoubleRange("over 1000", 1000.0f, false, Double.POSITIVE_INFINITY, false)); - - assertEquals("dim=field path=[] value=21 childCount=5\n less than 10 (10)\n less than or equal to 10 (11)\n over 90 (9)\n 90 or above (10)\n over 1000 (0)\n", - facets.getTopChildren(10, "field").toString()); - w.close(); - IOUtils.close(r, d); - } - public void testRandomLongs() throws Exception { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); @@ -499,175 +465,6 @@ public void testRandomLongs() throws Exception { IOUtils.close(r, dir); } - public void testRandomFloats() throws Exception { - Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir); - - int numDocs = atLeast(1000); - float[] values = new float[numDocs]; - float minValue = Float.POSITIVE_INFINITY; - float maxValue = Float.NEGATIVE_INFINITY; - for(int i=0;i 0 && random().nextInt(10) == 7) { - // Use an existing boundary: - 
DoubleRange prevRange = ranges[random().nextInt(rangeID)]; - if (random().nextBoolean()) { - min = prevRange.min; - } else { - min = prevRange.max; - } - } else { - min = random().nextDouble(); - } - double max; - if (rangeID > 0 && random().nextInt(10) == 7) { - // Use an existing boundary: - DoubleRange prevRange = ranges[random().nextInt(rangeID)]; - if (random().nextBoolean()) { - max = prevRange.min; - } else { - max = prevRange.max; - } - } else { - max = random().nextDouble(); - } - - if (min > max) { - double x = min; - min = max; - max = x; - } - - // Must truncate to float precision so that the - // drill-down counts (which use NRQ.newFloatRange) - // are correct: - min = (float) min; - max = (float) max; - - boolean minIncl; - boolean maxIncl; - if (min == max) { - minIncl = true; - maxIncl = true; - } else { - minIncl = random().nextBoolean(); - maxIncl = random().nextBoolean(); - } - rangeMinIncl[rangeID] = minIncl; - rangeMaxIncl[rangeID] = maxIncl; - ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl); - - if (VERBOSE) { - System.out.println("TEST: range " + rangeID + ": " + ranges[rangeID]); - } - - // Do "slow but hopefully correct" computation of - // expected count: - for(int i=0;i= min; - } else { - accept &= values[i] > min; - } - if (maxIncl) { - accept &= values[i] <= max; - } else { - accept &= values[i] < max; - } - if (VERBOSE) { - System.out.println("TEST: check doc=" + i + " val=" + values[i] + " accept=" + accept); - } - if (accept) { - expectedCounts[rangeID]++; - minAcceptedValue = Math.min(minAcceptedValue, values[i]); - maxAcceptedValue = Math.max(maxAcceptedValue, values[i]); - } - } - } - - FacetsCollector sfc = new FacetsCollector(); - s.search(new MatchAllDocsQuery(), sfc); - Query fastMatchQuery; - if (random().nextBoolean()) { - if (random().nextBoolean()) { - fastMatchQuery = FloatPoint.newRangeQuery("field", minValue, maxValue); - } else { - fastMatchQuery = FloatPoint.newRangeQuery("field", 
minAcceptedValue, maxAcceptedValue); - } - } else { - fastMatchQuery = null; - } - ValueSource vs = new FloatFieldSource("field"); - Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchQuery, ranges); - FacetResult result = facets.getTopChildren(10, "field"); - assertEquals(numRange, result.labelValues.length); - for(int rangeID=0;rangeID Date: Wed, 9 Mar 2016 10:56:13 +0100 Subject: [PATCH 0075/1113] LUCENE-7080: Sort files to corrupt to prevent HashSet iteration order issues across JVMs --- .../java/org/apache/lucene/store/MockDirectoryWrapper.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java index 962062e0312b..7fe7c3b0d5a3 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java +++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java @@ -45,6 +45,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.NoDeletionPolicy; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -296,7 +297,11 @@ public synchronized void corruptUnknownFiles() throws IOException { public synchronized void corruptFiles(Collection files) throws IOException { // Must make a copy because we change the incoming unsyncedFiles // when we create temp files, delete, etc., below: - for(String name : new ArrayList<>(files)) { + final List filesToCorrupt = new ArrayList<>(files); + // sort the files otherwise we have reproducibility issues + // across JVMs if the incoming collection is a hashSet etc. 
+ CollectionUtil.timSort(filesToCorrupt); + for(String name : filesToCorrupt) { int damage = randomState.nextInt(6); String action = null; From c1277cda1116a44b3371a3fa8364cc2032e14273 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 7 Mar 2016 19:53:09 +0000 Subject: [PATCH 0076/1113] SOLR-8765: Enforce required parameters in SolrJ Collections API --- solr/CHANGES.txt | 4 + .../org/apache/solr/core/CoreContainer.java | 9 +- .../handler/admin/CollectionsHandler.java | 101 ++-- .../apache/solr/cloud/DeleteStatusTest.java | 172 +++--- .../solrj/request/CollectionAdminRequest.java | 546 ++++++++++++++++-- .../solrj/request/CoreAdminRequest.java | 23 +- .../solrj/util/SolrIdentifierValidator.java | 26 +- 7 files changed, 674 insertions(+), 207 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 5ee5b979aa03..feb0e10dc2ea 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -24,6 +24,10 @@ Detailed Change List .processAndWait() to wait for a call to finish without holding HTTP collections open. (Alan Woodward) +* SOLR-8765: Enforce required parameters at query construction time in the SolrJ + Collections API, add static factory methods, and deprecate old setter methods. 
+ (Alan Woodward, Jason Gerlowski) + New Features ---------------------- diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 9ff45eaf1c28..1d614e343dca 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -804,9 +804,7 @@ private SolrCore create(CoreDescriptor dcore, boolean publishState) { SolrCore core = null; try { MDCLoggingContext.setCore(core); - if (!SolrIdentifierValidator.validateCoreName(dcore.getName())) { - throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE, dcore.getName())); - } + SolrIdentifierValidator.validateCoreName(dcore.getName()); if (zkSys.getZkController() != null) { zkSys.getZkController().preRegister(dcore); } @@ -1009,10 +1007,7 @@ public void unload(String name, boolean deleteIndexDir, boolean deleteDataDir, b } public void rename(String name, String toName) { - if (!SolrIdentifierValidator.validateCoreName(toName)) { - throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE, - toName)); - } + SolrIdentifierValidator.validateCoreName(toName); try (SolrCore core = getCore(name)) { if (core != null) { registerCore(toName, core, true); diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java index 593dac81bcc6..06968c381484 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java @@ -16,45 +16,8 @@ */ package org.apache.solr.handler.admin; -import static org.apache.solr.client.solrj.response.RequestStatusState.*; -import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; -import static 
org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_PROP_PREFIX; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET_SHUFFLE; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_ACTIVE_NODES; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_IF_DOWN; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.REQUESTID; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARDS_PROP; -import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARD_UNIQUE; -import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER; -import static org.apache.solr.common.cloud.DocCollection.RULE; -import static org.apache.solr.common.cloud.DocCollection.SNITCH; -import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT; -import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE; -import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; -import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.*; -import static org.apache.solr.common.params.CommonAdminParams.ASYNC; -import static org.apache.solr.common.params.CommonParams.NAME; -import static 
org.apache.solr.common.params.CommonParams.VALUE_LONG; -import static org.apache.solr.common.params.CoreAdminParams.DATA_DIR; -import static org.apache.solr.common.params.CoreAdminParams.DELETE_DATA_DIR; -import static org.apache.solr.common.params.CoreAdminParams.DELETE_INDEX; -import static org.apache.solr.common.params.CoreAdminParams.DELETE_INSTANCE_DIR; -import static org.apache.solr.common.params.CoreAdminParams.INSTANCE_DIR; -import static org.apache.solr.common.params.ShardParams._ROUTE_; -import static org.apache.solr.common.util.StrUtils.formatString; - import java.io.IOException; import java.lang.invoke.MethodHandles; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -66,6 +29,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.solr.client.solrj.SolrResponse; @@ -117,8 +82,45 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; +import static org.apache.solr.client.solrj.response.RequestStatusState.COMPLETED; +import static org.apache.solr.client.solrj.response.RequestStatusState.FAILED; +import static org.apache.solr.client.solrj.response.RequestStatusState.NOT_FOUND; +import static org.apache.solr.client.solrj.response.RequestStatusState.RUNNING; +import static org.apache.solr.client.solrj.response.RequestStatusState.SUBMITTED; +import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_PROP_PREFIX; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET; +import static 
org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET_SHUFFLE; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_ACTIVE_NODES; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_IF_DOWN; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.REQUESTID; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARDS_PROP; +import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SHARD_UNIQUE; +import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER; +import static org.apache.solr.common.cloud.DocCollection.RULE; +import static org.apache.solr.common.cloud.DocCollection.SNITCH; +import static org.apache.solr.common.cloud.DocCollection.STATE_FORMAT; +import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS; +import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; +import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE; +import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP; +import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP; +import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; +import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP; +import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.*; +import static org.apache.solr.common.params.CommonAdminParams.ASYNC; +import static org.apache.solr.common.params.CommonParams.NAME; +import static org.apache.solr.common.params.CommonParams.VALUE_LONG; +import static org.apache.solr.common.params.CoreAdminParams.DATA_DIR; +import static org.apache.solr.common.params.CoreAdminParams.DELETE_DATA_DIR; +import static org.apache.solr.common.params.CoreAdminParams.DELETE_INDEX; +import static 
org.apache.solr.common.params.CoreAdminParams.DELETE_INSTANCE_DIR; +import static org.apache.solr.common.params.CoreAdminParams.INSTANCE_DIR; +import static org.apache.solr.common.params.ShardParams._ROUTE_; +import static org.apache.solr.common.util.StrUtils.formatString; public class CollectionsHandler extends RequestHandlerBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -348,11 +350,7 @@ Map call(SolrQueryRequest req, SolrQueryResponse rsp, Collection addMapObject(props, RULE); addMapObject(props, SNITCH); verifyRuleParams(h.coreContainer, props); - final String collectionName = (String) props.get(NAME); - if (!SolrIdentifierValidator.validateCollectionName(collectionName)) { - throw new SolrException(ErrorCode.BAD_REQUEST, - SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION, collectionName)); - } + final String collectionName = SolrIdentifierValidator.validateCollectionName((String)props.get(NAME)); final String shardsParam = (String) props.get(SHARDS_PROP); if (StringUtils.isNotEmpty(shardsParam)) { verifyShardsParam(shardsParam); @@ -433,10 +431,7 @@ Map call(SolrQueryRequest req, SolrQueryResponse rsp, Collection @Override Map call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception { - final String aliasName = req.getParams().get(NAME); - if (!SolrIdentifierValidator.validateCollectionName(aliasName)) { - throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.ALIAS, aliasName)); - } + final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME)); return req.getParams().required().getAll(null, NAME, "collections"); } }, @@ -505,11 +500,7 @@ Map call(SolrQueryRequest req, SolrQueryResponse rsp, Collection COLLECTION_PROP, SHARD_ID_PROP); ClusterState clusterState = 
handler.coreContainer.getZkController().getClusterState(); - final String newShardName = req.getParams().get(SHARD_ID_PROP); - if (!SolrIdentifierValidator.validateShardName(newShardName)) { - throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD, - newShardName)); - } + final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP)); if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME))) throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections"); req.getParams().getAll(map, @@ -997,9 +988,7 @@ private static Map addMapObject(Map props, Strin private static void verifyShardsParam(String shardsParam) { for (String shard : shardsParam.split(",")) { - if (!SolrIdentifierValidator.validateShardName(shard)) - throw new SolrException(ErrorCode.BAD_REQUEST, SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD, - shard)); + SolrIdentifierValidator.validateShardName(shard); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java index 16ca35a0865b..3b8e014c0a74 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java @@ -17,101 +17,129 @@ package org.apache.solr.cloud; import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.CollectionAdminResponse; import 
org.apache.solr.client.solrj.response.RequestStatusState; +import org.junit.BeforeClass; import org.junit.Test; -public class DeleteStatusTest extends AbstractFullDistribZkTestBase { +public class DeleteStatusTest extends SolrCloudTestCase { + + public static final int MAX_WAIT_TIMEOUT = 30; + + @BeforeClass + public static void createCluster() throws Exception { + configureCluster(2) + .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .configure(); + } + + // Basically equivalent to RequestStatus.waitFor(), but doesn't delete the id from the queue + private static RequestStatusState waitForRequestState(String id, SolrClient client, int timeout) + throws IOException, SolrServerException, InterruptedException { + RequestStatusState state = RequestStatusState.SUBMITTED; + long endTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(MAX_WAIT_TIMEOUT); + while (System.nanoTime() < endTime) { + state = CollectionAdminRequest.requestStatus(id).process(client).getRequestStatus(); + if (state == RequestStatusState.COMPLETED) + break; + assumeTrue("Error creating collection - skipping test", state != RequestStatusState.FAILED); + TimeUnit.SECONDS.sleep(1); + } + assumeTrue("Timed out creating collection - skipping test", state == RequestStatusState.COMPLETED); + return state; + } @Test - public void testDeleteStatus() throws IOException, SolrServerException { - CollectionAdminRequest.Create create = new CollectionAdminRequest.Create(); - create.setCollectionName("requeststatus") - .setConfigName("conf1") - .setReplicationFactor(1) - .setNumShards(1) - .setAsyncId("collectioncreate") - .process(cloudClient); - - RequestStatusState state = getRequestStateAfterCompletion("collectioncreate", 30, cloudClient); - assertSame(RequestStatusState.COMPLETED, state); + public void testAsyncIdsMayBeDeleted() throws Exception { + + final CloudSolrClient client = cluster.getSolrClient(); + + final String collection = "deletestatus"; + final 
String asyncId = CollectionAdminRequest.createCollection(collection, "conf1", 1, 1).processAsync(client); + + waitForRequestState(asyncId, client, MAX_WAIT_TIMEOUT); - // Let's delete the stored response now - CollectionAdminRequest.DeleteStatus deleteStatus = new CollectionAdminRequest.DeleteStatus(); - CollectionAdminResponse rsp = deleteStatus - .setRequestId("collectioncreate") - .process(cloudClient); - assertEquals("successfully removed stored response for [collectioncreate]", rsp.getResponse().get("status")); - - // Make sure that the response was deleted from zk - state = getRequestState("collectioncreate", cloudClient); - assertSame(RequestStatusState.NOT_FOUND, state); - - // Try deleting the same requestid again - deleteStatus = new CollectionAdminRequest.DeleteStatus(); - rsp = deleteStatus - .setRequestId("collectioncreate") - .process(cloudClient); - assertEquals("[collectioncreate] not found in stored responses", rsp.getResponse().get("status")); - - // Let's try deleting a non-existent status - deleteStatus = new CollectionAdminRequest.DeleteStatus(); - rsp = deleteStatus - .setRequestId("foo") - .process(cloudClient); + assertEquals(RequestStatusState.COMPLETED, + CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus()); + + CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId(asyncId).process(client); + assertEquals("successfully removed stored response for [" + asyncId + "]", rsp.getResponse().get("status")); + + assertEquals(RequestStatusState.NOT_FOUND, + CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus()); + + } + + @Test + public void testDeletingNonExistentRequests() throws Exception { + + final CloudSolrClient client = cluster.getSolrClient(); + + CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId("foo").process(client); assertEquals("[foo] not found in stored responses", rsp.getResponse().get("status")); + + } + + @Test + public void 
testProcessAndWaitDeletesAsyncIds() throws IOException, SolrServerException, InterruptedException { + + final CloudSolrClient client = cluster.getSolrClient(); + + RequestStatusState state = CollectionAdminRequest.createCollection("requeststatus", "conf1", 1, 1) + .processAndWait("request1", client, MAX_WAIT_TIMEOUT); + assertSame(RequestStatusState.COMPLETED, state); + + // using processAndWait deletes the requestid + state = CollectionAdminRequest.requestStatus("request1").process(client).getRequestStatus(); + assertSame("Request id was not deleted by processAndWait call", RequestStatusState.NOT_FOUND, state); + } @Test public void testDeleteStatusFlush() throws Exception { - CollectionAdminRequest.Create create = new CollectionAdminRequest.Create(); - create.setConfigName("conf1") - .setCollectionName("foo") - .setAsyncId("foo") - .setNumShards(1) - .setReplicationFactor(1) - .process(cloudClient); - - create = new CollectionAdminRequest.Create(); - create.setConfigName("conf1") - .setCollectionName("bar") - .setAsyncId("bar") - .setNumShards(1) - .setReplicationFactor(1) - .process(cloudClient); - - RequestStatusState state = getRequestStateAfterCompletion("foo", 30, cloudClient); - assertEquals(RequestStatusState.COMPLETED, state); - - state = getRequestStateAfterCompletion("bar", 30, cloudClient); - assertEquals(RequestStatusState.COMPLETED, state); - - CollectionAdminRequest.DeleteStatus deleteStatus = new CollectionAdminRequest.DeleteStatus(); - deleteStatus.setFlush(true) - .process(cloudClient); - - assertEquals(RequestStatusState.NOT_FOUND, getRequestState("foo", cloudClient)); - assertEquals(RequestStatusState.NOT_FOUND, getRequestState("bar", cloudClient)); - - deleteStatus = new CollectionAdminRequest.DeleteStatus(); + + final CloudSolrClient client = cluster.getSolrClient(); + + String id1 = CollectionAdminRequest.createCollection("flush1", "conf1", 1, 1).processAsync(client); + String id2 = CollectionAdminRequest.createCollection("flush2", "conf1", 
1, 1).processAsync(client); + + assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id1, client, MAX_WAIT_TIMEOUT)); + assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id2, client, MAX_WAIT_TIMEOUT)); + + CollectionAdminRequest.deleteAllAsyncIds().process(client); + + assertEquals(RequestStatusState.NOT_FOUND, + CollectionAdminRequest.requestStatus(id1).process(client).getRequestStatus()); + assertEquals(RequestStatusState.NOT_FOUND, + CollectionAdminRequest.requestStatus(id2).process(client).getRequestStatus()); + + } + + @Test + @SuppressWarnings("deprecation") + public void testDeprecatedConstructorValidation() throws Exception { + + final CloudSolrClient client = cluster.getSolrClient(); + try { - deleteStatus.process(cloudClient); + new CollectionAdminRequest.DeleteStatus().process(client); fail("delete status should have failed"); - } catch (HttpSolrClient.RemoteSolrException e) { + } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Either requestid or flush parameter must be specified.")); } - deleteStatus = new CollectionAdminRequest.DeleteStatus(); try { - deleteStatus.setFlush(true) + new CollectionAdminRequest.DeleteStatus().setFlush(true) .setRequestId("foo") - .process(cloudClient); + .process(client); fail("delete status should have failed"); - } catch (HttpSolrClient.RemoteSolrException e) { + } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Both requestid and flush parameters can not be specified together.")); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java index c9c8c3989dfb..4f28408ce210 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java @@ -18,8 +18,6 @@ import java.io.IOException; import 
java.util.Collection; -import java.util.Iterator; -import java.util.Map; import java.util.Properties; import java.util.UUID; import java.util.concurrent.TimeUnit; @@ -80,16 +78,15 @@ public Collection getContentStreams() throws IOException { } protected void addProperties(ModifiableSolrParams params, Properties props) { - Iterator> iter = props.entrySet().iterator(); - while(iter.hasNext()) { - Map.Entry prop = iter.next(); - String key = (String) prop.getKey(); - String value = (String) prop.getValue(); - params.set(PROPERTY_PREFIX + key, value); + for (String propertyName : props.stringPropertyNames()) { + params.set(PROPERTY_PREFIX + propertyName, props.getProperty(propertyName)); } } - protected abstract static class AsyncCollectionAdminRequest extends CollectionAdminRequest { + /** + * Base class for asynchronous collection admin requests + */ + public abstract static class AsyncCollectionAdminRequest extends CollectionAdminRequest { public AsyncCollectionAdminRequest(CollectionAction action) { super(action); @@ -164,7 +161,7 @@ public RequestStatusState processAndWait(SolrClient client, long timeoutSeconds) public RequestStatusState processAndWait(String asyncId, SolrClient client, long timeoutSeconds) throws IOException, SolrServerException, InterruptedException { processAsync(asyncId, client); - return new RequestStatus().setRequestId(asyncId).waitFor(client, timeoutSeconds); + return requestStatus(asyncId).waitFor(client, timeoutSeconds); } @Override @@ -181,10 +178,12 @@ protected abstract static class AsyncCollectionSpecificAdminRequest extends Asyn protected String collection; - public AsyncCollectionSpecificAdminRequest(CollectionAction action) { + public AsyncCollectionSpecificAdminRequest(CollectionAction action, String collection) { super(action); + this.collection = collection; } + @Deprecated public abstract AsyncCollectionSpecificAdminRequest setCollectionName(String collection); @Override @@ -202,12 +201,14 @@ protected abstract static class 
AsyncShardSpecificAdminRequest extends AsyncColl protected String collection; protected String shard; - public AsyncShardSpecificAdminRequest(CollectionAction action) { + public AsyncShardSpecificAdminRequest(CollectionAction action, String collection, String shard) { super(action); } + @Deprecated public abstract AsyncShardSpecificAdminRequest setCollectionName(String collection); + @Deprecated public abstract AsyncShardSpecificAdminRequest setShardName(String shard); @Override @@ -228,12 +229,14 @@ protected abstract static class ShardSpecificAdminRequest extends CollectionAdmi protected String collection; protected String shard; - public ShardSpecificAdminRequest(CollectionAction action) { + public ShardSpecificAdminRequest(CollectionAction action, String collection, String shard) { super(action); } + @Deprecated public abstract ShardSpecificAdminRequest setCollectionName(String collection); + @Deprecated public abstract ShardSpecificAdminRequest setShardName(String shard); @Override @@ -264,7 +267,7 @@ protected abstract static class CollectionAdminRoleRequest extends AsyncCollecti protected String node; protected String role; - public CollectionAdminRoleRequest(CollectionAction action) { + public CollectionAdminRoleRequest(CollectionAction action, String node, String role) { super(action); } @@ -274,12 +277,14 @@ public CollectionAdminRoleRequest setAsyncId(String id) { return this; } + @Deprecated public abstract CollectionAdminRoleRequest setNode(String node); public String getNode() { return this.node; } + @Deprecated public abstract CollectionAdminRoleRequest setRole(String role); public String getRole() { @@ -298,6 +303,17 @@ public SolrParams getParams() { /** Specific Collection API call implementations **/ + /** + * Returns a SolrRequest for creating a collection + * @param collection the collection name + * @param config the collection config + * @param numShards the number of shards in the collection + * @param numReplicas the replication factor of 
the collection + */ + public static Create createCollection(String collection, String config, int numShards, int numReplicas) { + return new Create(collection, config, numShards, numReplicas); + } + // CREATE request public static class Create extends AsyncCollectionSpecificAdminRequest { @@ -315,17 +331,31 @@ public static class Create extends AsyncCollectionSpecificAdminRequest { protected Integer stateFormat; private String[] rule , snitch; + /** + * @deprecated Use {@link #createCollection(String, String, int, int)} + */ + @Deprecated public Create() { - super(CollectionAction.CREATE); + super(CollectionAction.CREATE, null); } + private Create(String collection, String config, int numShards, int numReplicas) { + super(CollectionAction.CREATE, SolrIdentifierValidator.validateCollectionName(collection)); + this.configName = config; + this.numShards = numShards; + this.replicationFactor = numReplicas; + } + + @Deprecated public Create setConfigName(String config) { this.configName = config; return this; } public Create setCreateNodeSet(String nodeSet) { this.createNodeSet = nodeSet; return this; } public Create setRouterName(String routerName) { this.routerName = routerName; return this; } public Create setRouterField(String routerField) { this.routerField = routerField; return this; } + @Deprecated public Create setNumShards(Integer numShards) {this.numShards = numShards; return this; } public Create setMaxShardsPerNode(Integer numShards) { this.maxShardsPerNode = numShards; return this; } public Create setAutoAddReplicas(boolean autoAddReplicas) { this.autoAddReplicas = autoAddReplicas; return this; } + @Deprecated public Create setReplicationFactor(Integer repl) { this.replicationFactor = repl; return this; } public Create setStateFormat(Integer stateFormat) { this.stateFormat = stateFormat; return this; } public Create setRule(String... 
s){ this.rule = s; return this; } @@ -350,10 +380,7 @@ public Create() { */ public Create setShards(String shards) { for (String shard : shards.split(",")) { - if (!SolrIdentifierValidator.validateShardName(shard)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD, - shard)); - } + SolrIdentifierValidator.validateShardName(shard); } this.shards = shards; return this; @@ -366,16 +393,14 @@ public Create setShards(String shards) { * * @throws IllegalArgumentException if the collection name contains invalid characters. */ + @Deprecated public Create setCollectionName(String collectionName) throws SolrException { - if (!SolrIdentifierValidator.validateCollectionName(collectionName)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.COLLECTION, - collectionName)); - } - this.collection = collectionName; + this.collection = SolrIdentifierValidator.validateCollectionName(collectionName); return this; } @Override + @Deprecated public Create setAsyncId(String id) { this.asyncId = id; return this; @@ -426,46 +451,87 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to reload a collection + */ + public static Reload reloadCollection(String collection) { + return new Reload(collection); + } + // RELOAD request public static class Reload extends AsyncCollectionSpecificAdminRequest { + /** + * @deprecated use {@link #reloadCollection(String)} + */ + @Deprecated public Reload() { - super(CollectionAction.RELOAD); + super(CollectionAction.RELOAD, null); + } + + private Reload(String collection) { + super(CollectionAction.RELOAD, collection); } @Override + @Deprecated public Reload setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public Reload setAsyncId(String id) { this.asyncId = id; return this; } } + /** + * Returns a SolrRequest to delete a collection 
+ */ + public static Delete deleteCollection(String collection) { + return new Delete(collection); + } + // DELETE request public static class Delete extends AsyncCollectionSpecificAdminRequest { + /** + * @deprecated Use {@link #deleteCollection(String)} + */ + @Deprecated public Delete() { - super(CollectionAction.DELETE); + super(CollectionAction.DELETE, null); + } + + private Delete(String collection) { + super(CollectionAction.DELETE, collection); } @Override + @Deprecated public Delete setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public Delete setAsyncId(String id) { this.asyncId = id; return this; } } + /** + * Returns a SolrRequest to create a new shard in a collection + */ + public static CreateShard createShard(String collection, String shard) { + return new CreateShard(collection, shard); + } + // CREATESHARD request public static class CreateShard extends AsyncShardSpecificAdminRequest { @@ -490,11 +556,20 @@ public CreateShard setProperties(Properties properties) { return this; } + /** + * @deprecated use {@link #createShard(String, String)} + */ + @Deprecated public CreateShard() { - super(CollectionAction.CREATESHARD); + super(CollectionAction.CREATESHARD, null, null); + } + + private CreateShard(String collection, String shard) { + super(CollectionAction.CREATESHARD, collection, SolrIdentifierValidator.validateShardName(shard)); } @Override + @Deprecated public CreateShard setCollectionName(String collection) { this.collection = collection; return this; @@ -508,16 +583,14 @@ public CreateShard setCollectionName(String collection) { * @throws IllegalArgumentException if the shard name contains invalid characters. 
*/ @Override + @Deprecated public CreateShard setShardName(String shardName) { - if (!SolrIdentifierValidator.validateShardName(shardName)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.SHARD, - shardName)); - } - this.shard = shardName; + this.shard = SolrIdentifierValidator.validateShardName(shardName); return this; } @Override + @Deprecated public CreateShard setAsyncId(String id) { this.asyncId = id; return this; @@ -538,6 +611,13 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to split a shard in a collection + */ + public static SplitShard splitShard(String collection, String shard) { + return new SplitShard(collection, shard); + } + // SPLITSHARD request public static class SplitShard extends AsyncShardSpecificAdminRequest { protected String ranges; @@ -545,8 +625,16 @@ public static class SplitShard extends AsyncShardSpecificAdminRequest { private Properties properties; + private SplitShard(String collection, String shard) { + super(CollectionAction.SPLITSHARD, collection, shard); + } + + /** + * @deprecated Use {@link #splitShard(String, String)} + */ + @Deprecated public SplitShard() { - super(CollectionAction.SPLITSHARD); + super(CollectionAction.SPLITSHARD, null, null); } public SplitShard setRanges(String ranges) { this.ranges = ranges; return this; } @@ -571,18 +659,21 @@ public SplitShard setProperties(Properties properties) { } @Override + @Deprecated public SplitShard setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public SplitShard setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public SplitShard setAsyncId(String id) { this.asyncId = id; return this; @@ -604,14 +695,29 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to delete a shard from a collection + */ + public static DeleteShard deleteShard(String collection, String shard) { + 
return new DeleteShard(collection, shard); + } + // DELETESHARD request public static class DeleteShard extends AsyncShardSpecificAdminRequest { private Boolean deleteInstanceDir; private Boolean deleteDataDir; + /** + * @deprecated Use {@link #deleteShard(String, String)} + */ + @Deprecated public DeleteShard() { - super(CollectionAction.DELETESHARD); + super(CollectionAction.DELETESHARD, null, null); + } + + private DeleteShard(String collection, String shard) { + super(CollectionAction.DELETESHARD, collection, shard); } public Boolean getDeleteInstanceDir() { @@ -633,18 +739,21 @@ public DeleteShard setDeleteDataDir(Boolean deleteDataDir) { } @Override + @Deprecated public DeleteShard setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public DeleteShard setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public DeleteShard setAsyncId(String id) { this.asyncId = id; return this; @@ -663,21 +772,41 @@ public SolrParams getParams() { } } + /** + * Returns a SolrRequest to force a leader election for a shard in a collection + * + * WARNING: This may cause data loss if the new leader does not contain updates + * acknowledged by the old leader. Use only if leadership elections are entirely + * broken. 
+ */ + public static ForceLeader forceLeaderElection(String collection, String shard) { + return new ForceLeader(collection, shard); + } + // FORCELEADER request public static class ForceLeader extends ShardSpecificAdminRequest { + /** + * @deprecated Use {@link #forceLeaderElection(String, String)} + */ + @Deprecated public ForceLeader() { - super(CollectionAction.FORCELEADER); + super(CollectionAction.FORCELEADER, null, null); } + private ForceLeader(String collection, String shard) { + super(CollectionAction.FORCELEADER, collection, shard); + } @Override + @Deprecated public ForceLeader setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public ForceLeader setShardName(String shard) { this.shard = shard; return this; @@ -685,6 +814,9 @@ public ForceLeader setShardName(String shard) { } + /** + * A response object for {@link RequestStatus} requests + */ public static class RequestStatusResponse extends CollectionAdminResponse { public RequestStatusState getRequestStatus() { @@ -694,15 +826,34 @@ public RequestStatusState getRequestStatus() { } + /** + * Returns a SolrRequest for checking the status of an asynchronous request + * + * @see CollectionAdminRequest.AsyncCollectionAdminRequest + */ + public static RequestStatus requestStatus(String requestId) { + return new RequestStatus(requestId); + } + // REQUESTSTATUS request public static class RequestStatus extends CollectionAdminRequest { protected String requestId = null; + private RequestStatus(String requestId) { + super(CollectionAction.REQUESTSTATUS); + this.requestId = requestId; + } + + /** + * @deprecated Use {@link #requestStatus(String)} + */ + @Deprecated public RequestStatus() { super(CollectionAction.REQUESTSTATUS); } + @Deprecated public RequestStatus setRequestId(String requestId) { this.requestId = requestId; return this; @@ -726,6 +877,12 @@ protected RequestStatusResponse createResponse(SolrClient client) { return new 
RequestStatusResponse(); } + /** + * Wait until the asynchronous request is either completed or failed, up to a timeout + * @param client a SolrClient + * @param timeoutSeconds the maximum time to wait in seconds + * @return the last seen state of the request + */ public RequestStatusState waitFor(SolrClient client, long timeoutSeconds) throws IOException, SolrServerException, InterruptedException { long finishTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds); @@ -733,7 +890,7 @@ public RequestStatusState waitFor(SolrClient client, long timeoutSeconds) while (System.nanoTime() < finishTime) { state = this.process(client).getRequestStatus(); if (state == RequestStatusState.COMPLETED || state == RequestStatusState.FAILED) { - new DeleteStatus().setRequestId(requestId).process(client); + deleteAsyncId(requestId).process(client); return state; } TimeUnit.SECONDS.sleep(1); @@ -742,21 +899,43 @@ public RequestStatusState waitFor(SolrClient client, long timeoutSeconds) } } + /** + * Returns a SolrRequest to delete an asynchronous request status + */ + public static DeleteStatus deleteAsyncId(String requestId) { + return new DeleteStatus(requestId); + } + + public static DeleteStatus deleteAllAsyncIds() { + return new DeleteStatus().setFlush(true); + } + // DELETESTATUS request public static class DeleteStatus extends CollectionAdminRequest { protected String requestId = null; protected Boolean flush = null; + private DeleteStatus(String requestId) { + super(CollectionAction.DELETESTATUS); + this.requestId = requestId; + } + + /** + * @deprecated Use {@link #deleteAsyncId(String)} or {@link #deleteAllAsyncIds()} + */ + @Deprecated public DeleteStatus() { super(CollectionAction.DELETESTATUS); } + @Deprecated public DeleteStatus setRequestId(String requestId) { this.requestId = requestId; return this; } + @Deprecated public DeleteStatus setFlush(Boolean flush) { this.flush = flush; return this; @@ -773,9 +952,12 @@ public Boolean getFlush() { @Override 
public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); + if (requestId == null && flush == null) + throw new IllegalArgumentException("Either requestid or flush parameter must be specified."); + if (requestId != null && flush != null) + throw new IllegalArgumentException("Both requestid and flush parameters can not be specified together."); if (requestId != null) params.set(CoreAdminParams.REQUESTID, requestId); - if (flush != null) params.set(CollectionAdminParams.FLUSH, flush); return params; @@ -788,12 +970,31 @@ protected CollectionAdminResponse createResponse(SolrClient client) { } + /** + * Returns a SolrRequest to create a new alias + * @param aliasName the alias name + * @param aliasedCollections the collections to alias + */ + public static CreateAlias createAlias(String aliasName, String aliasedCollections) { + return new CreateAlias(aliasName, aliasedCollections); + } + // CREATEALIAS request public static class CreateAlias extends AsyncCollectionAdminRequest { protected String aliasName; protected String aliasedCollections; + private CreateAlias(String aliasName, String aliasedCollections) { + super(CollectionAction.CREATEALIAS); + this.aliasName = SolrIdentifierValidator.validateAliasName(aliasName); + this.aliasedCollections = aliasedCollections; + } + + /** + * @deprecated Use {@link #createAlias(String, String)} + */ + @Deprecated public CreateAlias() { super(CollectionAction.CREATEALIAS); } @@ -805,12 +1006,9 @@ public CreateAlias() { * * @throws IllegalArgumentException if the alias name contains invalid characters. 
*/ + @Deprecated public CreateAlias setAliasName(String aliasName) { - if (!SolrIdentifierValidator.validateCollectionName(aliasName)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.ALIAS, - aliasName)); - } - this.aliasName = aliasName; + this.aliasName = SolrIdentifierValidator.validateAliasName(aliasName); return this; } @@ -818,6 +1016,7 @@ public String getAliasName() { return aliasName; } + @Deprecated public CreateAlias setAliasedCollections(String alias) { this.aliasedCollections = alias; return this; @@ -828,6 +1027,7 @@ public String getAliasedCollections() { } @Override + @Deprecated public CreateAlias setAsyncId(String id) { this.asyncId = id; return this; @@ -843,21 +1043,39 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to delete an alias + */ + public static DeleteAlias deleteAlias(String aliasName) { + return new DeleteAlias(aliasName); + } + // DELETEALIAS request public static class DeleteAlias extends AsyncCollectionAdminRequest { protected String aliasName; + private DeleteAlias(String aliasName) { + super(CollectionAction.DELETEALIAS); + this.aliasName = aliasName; + } + + /** + * @deprecated Use {@link #deleteAlias(String)} + */ + @Deprecated public DeleteAlias() { super(CollectionAction.DELETEALIAS); } + @Deprecated public DeleteAlias setAliasName(String aliasName) { this.aliasName = aliasName; return this; } @Override + @Deprecated public DeleteAlias setAsyncId(String id) { this.asyncId = id; return this; @@ -873,6 +1091,20 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to add a replica to a shard in a collection + */ + public static AddReplica addReplicaToShard(String collection, String shard) { + return new AddReplica(collection, shard, null); + } + + /** + * Returns a SolrRequest to add a replica to a collection using a route key + */ + public static AddReplica addReplicaByRouteKey(String collection, String routeKey) { + 
return new AddReplica(collection, null, routeKey); + } + // ADDREPLICA request public static class AddReplica extends AsyncCollectionAdminRequest { @@ -884,10 +1116,21 @@ public static class AddReplica extends AsyncCollectionAdminRequest { protected String dataDir; protected Properties properties; + /** + * @deprecated Use {@link #addReplicaByRouteKey(String, String)} or {@link #addReplicaToShard(String, String)} + */ + @Deprecated public AddReplica() { super(CollectionAction.ADDREPLICA); } + private AddReplica(String collection, String shard, String routeKey) { + super(CollectionAction.ADDREPLICA); + this.collection = collection; + this.shard = shard; + this.routeKey = routeKey; + } + public Properties getProperties() { return properties; } @@ -910,6 +1153,7 @@ public String getRouteKey() { return routeKey; } + @Deprecated public AddReplica setRouteKey(String routeKey) { this.routeKey = routeKey; return this; @@ -933,17 +1177,20 @@ public AddReplica setDataDir(String dataDir) { return this; } + @Deprecated public AddReplica setCollectionName(String collection) { this.collection = collection; return this; } + @Deprecated public AddReplica setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public AddReplica setAsyncId(String id) { this.asyncId = id; return this; @@ -957,7 +1204,7 @@ public SolrParams getParams() { params.add(CoreAdminParams.COLLECTION, collection); if (shard == null || shard.isEmpty()) { if (routeKey == null) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Either shard or routeKey must be provided"); + throw new IllegalArgumentException("Either shard or routeKey must be provided"); } params.add(ShardParams._ROUTE_, routeKey); } @@ -979,7 +1226,13 @@ public SolrParams getParams() { return params; } + } + /** + * Returns a SolrRequest to delete a replica from a shard in a collection + */ + public static DeleteReplica deleteReplica(String collection, String shard, String replica) { + return new 
DeleteReplica(collection, shard, replica); } // DELETEREPLICA request @@ -991,10 +1244,20 @@ public static class DeleteReplica extends AsyncShardSpecificAdminRequest { private Boolean deleteInstanceDir; private Boolean deleteIndexDir; + /** + * @deprecated Use {@link #deleteReplica(String, String, String)} + */ + @Deprecated public DeleteReplica() { - super(CollectionAction.DELETEREPLICA); + super(CollectionAction.DELETEREPLICA, null, null); + } + + private DeleteReplica(String collection, String shard, String replica) { + super(CollectionAction.DELETEREPLICA, collection, shard); + this.replica = replica; } + @Deprecated public DeleteReplica setReplica(String replica) { this.replica = replica; return this; @@ -1014,18 +1277,21 @@ public Boolean getOnlyIfDown() { } @Override + @Deprecated public DeleteReplica setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public DeleteReplica setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public DeleteReplica setAsyncId(String id) { this.asyncId = id; return this; @@ -1070,16 +1336,34 @@ public DeleteReplica setDeleteInstanceDir(Boolean deleteInstanceDir) { } } + /** + * Returns a SolrRequest to set a cluster property + */ + public static ClusterProp setClusterProperty(String propertyName, String propertyValue) { + return new ClusterProp(propertyName, propertyValue); + } + // CLUSTERPROP request public static class ClusterProp extends CollectionAdminRequest { private String propertyName; private String propertyValue; + /** + * @deprecated Use {@link #setClusterProperty(String, String)} + */ + @Deprecated public ClusterProp() { super(CollectionAction.CLUSTERPROP); } + private ClusterProp(String propertyName, String propertyValue) { + super(CollectionAction.CLUSTERPROP); + this.propertyName = propertyName; + this.propertyValue = propertyValue; + } + + @Deprecated public ClusterProp setPropertyName(String propertyName) { 
this.propertyName = propertyName; return this; @@ -1089,6 +1373,7 @@ public String getPropertyName() { return this.propertyName; } + @Deprecated public ClusterProp setPropertyValue(String propertyValue) { this.propertyValue = propertyValue; return this; @@ -1115,6 +1400,13 @@ protected CollectionAdminResponse createResponse(SolrClient client) { } + /** + * Returns a SolrRequest to migrate data matching a split key to another collection + */ + public static Migrate migrateData(String collection, String targetCollection, String splitKey) { + return new Migrate(collection, targetCollection, splitKey); + } + // MIGRATE request public static class Migrate extends AsyncCollectionAdminRequest { @@ -1124,10 +1416,22 @@ public static class Migrate extends AsyncCollectionAdminRequest { private Integer forwardTimeout; private Properties properties; + /** + * @deprecated Use {@link #migrateData(String, String, String)} + */ + @Deprecated public Migrate() { super(CollectionAction.MIGRATE); } + private Migrate(String collection, String targetCollection, String splitKey) { + super(CollectionAction.MIGRATE); + this.collection = collection; + this.targetCollection = targetCollection; + this.splitKey = splitKey; + } + + @Deprecated public Migrate setCollectionName(String collection) { this.collection = collection; return this; @@ -1137,6 +1441,7 @@ public String getCollectionName() { return collection; } + @Deprecated public Migrate setTargetCollection(String targetCollection) { this.targetCollection = targetCollection; return this; @@ -1146,6 +1451,7 @@ public String getTargetCollection() { return this.targetCollection; } + @Deprecated public Migrate setSplitKey(String splitKey) { this.splitKey = splitKey; return this; @@ -1174,6 +1480,7 @@ public Properties getProperties() { } @Override + @Deprecated public Migrate setAsyncId(String id) { this.asyncId = id; return this; @@ -1198,46 +1505,87 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to add a role to a 
node + */ + public static AddRole addRole(String node, String role) { + return new AddRole(node, role); + } + // ADDROLE request public static class AddRole extends CollectionAdminRoleRequest { + /** + * @deprecated Use {@link #addRole(String, String)} + */ + @Deprecated public AddRole() { - super(CollectionAction.ADDROLE); + super(CollectionAction.ADDROLE, null, null); + } + + private AddRole(String node, String role) { + super(CollectionAction.ADDROLE, node, role); } @Override + @Deprecated public AddRole setNode(String node) { this.node = node; return this; } @Override + @Deprecated public AddRole setRole(String role) { this.role = role; return this; } } + /** + * Returns a SolrRequest to remove a role from a node + */ + public static RemoveRole removeRole(String node, String role) { + return new RemoveRole(node, role); + } + // REMOVEROLE request public static class RemoveRole extends CollectionAdminRoleRequest { + /** + * @deprecated Use {@link #removeRole(String, String)} + */ + @Deprecated public RemoveRole() { - super(CollectionAction.REMOVEROLE); + super(CollectionAction.REMOVEROLE, null, null); + } + + private RemoveRole(String node, String role) { + super(CollectionAction.REMOVEROLE, node, role); } @Override + @Deprecated public RemoveRole setNode(String node) { this.node = node; return this; } @Override + @Deprecated public RemoveRole setRole(String role) { this.role = role; return this; } } + /** + * Return a SolrRequest to get the Overseer status + */ + public static OverseerStatus getOverseerStatus() { + return new OverseerStatus(); + } + // OVERSEERSTATUS request public static class OverseerStatus extends AsyncCollectionAdminRequest { @@ -1246,12 +1594,20 @@ public OverseerStatus () { } @Override + @Deprecated public OverseerStatus setAsyncId(String id) { this.asyncId = id; return this; } } + /** + * Return a SolrRequest to get the Cluster status + */ + public static ClusterStatus getClusterStatus() { + return new ClusterStatus(); + } + // 
CLUSTERSTATUS request public static class ClusterStatus extends CollectionAdminRequest { @@ -1310,7 +1666,13 @@ protected CollectionAdminResponse createResponse(SolrClient client) { return new CollectionAdminResponse(); } + } + /** + * Returns a SolrRequest to get a list of collections in the cluster + */ + public static List listCollections() { + return new List(); } // LIST request @@ -1325,6 +1687,14 @@ protected CollectionAdminResponse createResponse(SolrClient client) { } } + /** + * Returns a SolrRequest to add a property to a specific replica + */ + public static AddReplicaProp addReplicaProperty(String collection, String shard, String replica, + String propertyName, String propertyValue) { + return new AddReplicaProp(collection, shard, replica, propertyName, propertyValue); + } + // ADDREPLICAPROP request public static class AddReplicaProp extends AsyncShardSpecificAdminRequest { @@ -1333,14 +1703,26 @@ public static class AddReplicaProp extends AsyncShardSpecificAdminRequest { private String propertyValue; private Boolean shardUnique; + /** + * @deprecated Use {@link #addReplicaProperty(String, String, String, String, String)} + */ + @Deprecated public AddReplicaProp() { - super(CollectionAction.ADDREPLICAPROP); + super(CollectionAction.ADDREPLICAPROP, null, null); + } + + private AddReplicaProp(String collection, String shard, String replica, String propertyName, String propertyValue) { + super(CollectionAction.ADDREPLICAPROP, collection, shard); + this.replica = replica; + this.propertyName = propertyName; + this.propertyValue = propertyValue; } public String getReplica() { return replica; } + @Deprecated public AddReplicaProp setReplica(String replica) { this.replica = replica; return this; @@ -1350,6 +1732,7 @@ public String getPropertyName() { return propertyName; } + @Deprecated public AddReplicaProp setPropertyName(String propertyName) { this.propertyName = propertyName; return this; @@ -1359,6 +1742,7 @@ public String getPropertyValue() { return 
propertyValue; } + @Deprecated public AddReplicaProp setPropertyValue(String propertyValue) { this.propertyValue = propertyValue; return this; @@ -1374,18 +1758,21 @@ public AddReplicaProp setShardUnique(Boolean shardUnique) { } @Override + @Deprecated public AddReplicaProp setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public AddReplicaProp setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public AddReplicaProp setAsyncId(String id) { this.asyncId = id; return this; @@ -1407,20 +1794,39 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to delete a property from a specific replica + */ + public static DeleteReplicaProp deleteReplicaProperty(String collection, String shard, + String replica, String propertyName) { + return new DeleteReplicaProp(collection, shard, replica, propertyName); + } + // DELETEREPLICAPROP request public static class DeleteReplicaProp extends AsyncShardSpecificAdminRequest { private String replica; private String propertyName; + /** + * @deprecated Use {@link #deleteReplicaProperty(String, String, String, String)} + */ + @Deprecated public DeleteReplicaProp() { - super(CollectionAction.DELETEREPLICAPROP); + super(CollectionAction.DELETEREPLICAPROP, null, null); + } + + private DeleteReplicaProp(String collection, String shard, String replica, String propertyName) { + super(CollectionAction.DELETEREPLICAPROP, collection, shard); + this.replica = replica; + this.propertyName = propertyName; } public String getReplica() { return replica; } + @Deprecated public DeleteReplicaProp setReplica(String replica) { this.replica = replica; return this; @@ -1430,24 +1836,28 @@ public String getPropertyName() { return propertyName; } + @Deprecated public DeleteReplicaProp setPropertyName(String propertyName) { this.propertyName = propertyName; return this; } @Override + @Deprecated public DeleteReplicaProp setCollectionName(String 
collection) { this.collection = collection; return this; } @Override + @Deprecated public DeleteReplicaProp setShardName(String shard) { this.shard = shard; return this; } @Override + @Deprecated public DeleteReplicaProp setAsyncId(String id) { this.asyncId = id; return this; @@ -1464,21 +1874,41 @@ public SolrParams getParams() { } + /** + * Returns a SolrRequest to migrate a collection state format + * + * This is an expert-level request, and should not generally be necessary. + */ + public static MigrateClusterState migrateCollectionFormat(String collection) { + return new MigrateClusterState(collection); + } + // MIGRATECLUSTERSTATE request public static class MigrateClusterState extends AsyncCollectionAdminRequest { protected String collection; + private MigrateClusterState(String collection) { + super(CollectionAction.MIGRATESTATEFORMAT); + this.collection = collection; + } + + /** + * @deprecated Use {@link #migrateCollectionFormat(String)} + */ + @Deprecated public MigrateClusterState() { super(CollectionAction.MIGRATESTATEFORMAT); } + @Deprecated public MigrateClusterState setCollectionName(String collection) { this.collection = collection; return this; } @Override + @Deprecated public MigrateClusterState setAsyncId(String id) { this.asyncId = id; return this; @@ -1494,6 +1924,13 @@ public SolrParams getParams() { } } + /** + * Returns a SolrRequest to balance a replica property across the shards of a collection + */ + public static BalanceShardUnique balanceReplicaProperty(String collection, String propertyName) { + return new BalanceShardUnique(collection, propertyName); + } + // BALANCESHARDUNIQUE request public static class BalanceShardUnique extends AsyncCollectionAdminRequest { @@ -1502,6 +1939,16 @@ public static class BalanceShardUnique extends AsyncCollectionAdminRequest { protected Boolean onlyActiveNodes; protected Boolean shardUnique; + private BalanceShardUnique(String collection, String propertyName) { + 
super(CollectionAction.BALANCESHARDUNIQUE); + this.collection = collection; + this.propertyName = propertyName; + } + + /** + * @deprecated Use {@link #balanceReplicaProperty(String, String)} + */ + @Deprecated public BalanceShardUnique() { super(CollectionAction.BALANCESHARDUNIQUE); } @@ -1510,6 +1957,7 @@ public String getPropertyName() { return propertyName; } + @Deprecated public BalanceShardUnique setPropertyName(String propertyName) { this.propertyName = propertyName; return this; @@ -1533,6 +1981,7 @@ public BalanceShardUnique setShardUnique(Boolean shardUnique) { return this; } + @Deprecated public BalanceShardUnique setCollection(String collection) { this.collection = collection; return this; @@ -1543,6 +1992,7 @@ public String getCollection() { } @Override + @Deprecated public BalanceShardUnique setAsyncId(String id) { this.asyncId = id; return this; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java index ab563ed87ed5..7d9e356e73fa 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.client.solrj.request; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -29,11 +34,6 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ContentStream; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - /** * This class is experimental and subject to change. 
* @@ -110,11 +110,7 @@ public String getConfigSet() { */ @Override public void setCoreName(String coreName) { - if (!SolrIdentifierValidator.validateCoreName(coreName)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE, - coreName)); - } - this.core = coreName; + this.core = SolrIdentifierValidator.validateCoreName(coreName); } @Override @@ -559,14 +555,9 @@ public static CoreAdminResponse unloadCore(String name, boolean deleteIndex, boo */ public static CoreAdminResponse renameCore(String coreName, String newName, SolrClient client ) throws SolrServerException, IOException { - if (!SolrIdentifierValidator.validateCoreName(newName)) { - throw new IllegalArgumentException(SolrIdentifierValidator.getIdentifierMessage(SolrIdentifierValidator.IdentifierType.CORE, - newName)); - } - CoreAdminRequest req = new CoreAdminRequest(); req.setCoreName(coreName); - req.setOtherCoreName(newName); + req.setOtherCoreName(SolrIdentifierValidator.validateCoreName(newName)); req.setAction( CoreAdminAction.RENAME ); return req.process( client ); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java index 449c62127b90..57f9909f0514 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java @@ -32,18 +32,28 @@ public enum IdentifierType { SHARD, COLLECTION, CORE, ALIAS } - public static boolean validateShardName(String shardName) { - return validateIdentifier(shardName); + public static String validateName(IdentifierType type, String name) { + if (!validateIdentifier(name)) + throw new IllegalArgumentException(getIdentifierMessage(type, name)); + return name; } - - public static boolean validateCollectionName(String collectionName) { - return 
validateIdentifier(collectionName); + + public static String validateShardName(String shardName) { + return validateName(IdentifierType.SHARD, shardName); } - public static boolean validateCoreName(String name) { - return validateIdentifier(name); + public static String validateCollectionName(String collectionName) { + return validateName(IdentifierType.COLLECTION, collectionName); } - + + public static String validateAliasName(String alias) { + return validateName(IdentifierType.ALIAS, alias); + } + + public static String validateCoreName(String coreName) { + return validateName(IdentifierType.CORE, coreName); + } + private static boolean validateIdentifier(String identifier) { if (identifier == null || ! identifierPattern.matcher(identifier).matches()) { return false; From 8ff5b57cbde0aa80e4c2b53afe54ef0b38ffd879 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 9 Mar 2016 09:28:50 -0500 Subject: [PATCH 0077/1113] add 2B point test --- .../org/apache/lucene/index/Test2BPoints.java | 130 ++++++++++++++++++ 1 file changed, 130 insertions(+) create mode 100644 lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java new file mode 100644 index 000000000000..bfe387e8ca9a --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.index; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.util.LuceneTestCase.Monster; +import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.TimeUnits; + +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + +// e.g. 
run like this: ant test -Dtestcase=Test2BPoints -Dtests.nightly=true -Dtests.verbose=true -Dtests.monster=true +// +// or: python -u /l/util/src/python/repeatLuceneTest.py -once -nolog -tmpDir /b/tmp -logDir /l/logs Test2BPoints.test1D -verbose + +@SuppressCodecs({ "SimpleText", "Memory", "Direct", "Compressing" }) +@TimeoutSuite(millis = 16 * TimeUnits.HOUR) +@Monster("takes at least 4 hours and consumes many GB of temp disk space") +public class Test2BPoints extends LuceneTestCase { + public void test1D() throws Exception { + Directory dir = FSDirectory.open(createTempDir("2BPoints1D")); + System.out.println("DIR: " + ((FSDirectory) dir).getDirectory()); + + IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())) + .setCodec(Codec.forName("Lucene60")) + .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setRAMBufferSizeMB(64.0) + .setMergeScheduler(new ConcurrentMergeScheduler()) + .setMergePolicy(newLogMergePolicy(false, 10)) + .setOpenMode(IndexWriterConfig.OpenMode.CREATE); + + IndexWriter w = new IndexWriter(dir, iwc); + + MergePolicy mp = w.getConfig().getMergePolicy(); + if (mp instanceof LogByteSizeMergePolicy) { + // 1 petabyte: + ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024); + } + + final int numDocs = (Integer.MAX_VALUE / 26) + 1; + long counter = 0; + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + for (int j=0;j<26;j++) { + doc.add(new LongPoint("long", counter)); + counter++; + } + w.addDocument(doc); + if (VERBOSE && i % 100000 == 0) { + System.out.println(i + " of " + numDocs + "..."); + } + } + w.forceMerge(1); + DirectoryReader r = DirectoryReader.open(w); + IndexSearcher s = new IndexSearcher(r); + assertEquals(1250, s.count(LongPoint.newRangeQuery("long", 33640828, 33673327))); + assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE); + r.close(); + w.close(); + System.out.println("TEST: now CheckIndex"); + TestUtil.checkIndex(dir); + 
dir.close(); + } + + public void test2D() throws Exception { + Directory dir = FSDirectory.open(createTempDir("2BPoints2D")); + + IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())) + .setCodec(Codec.forName("Lucene60")) + .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setRAMBufferSizeMB(64.0) + .setMergeScheduler(new ConcurrentMergeScheduler()) + .setMergePolicy(newLogMergePolicy(false, 10)) + .setOpenMode(IndexWriterConfig.OpenMode.CREATE); + + IndexWriter w = new IndexWriter(dir, iwc); + + MergePolicy mp = w.getConfig().getMergePolicy(); + if (mp instanceof LogByteSizeMergePolicy) { + // 1 petabyte: + ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024); + } + + final int numDocs = (Integer.MAX_VALUE / 26) + 1; + long counter = 0; + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + for (int j=0;j<26;j++) { + doc.add(new LongPoint("long", counter, 2*counter+1)); + counter++; + } + w.addDocument(doc); + if (VERBOSE && i % 100000 == 0) { + System.out.println(i + " of " + numDocs + "..."); + } + } + w.forceMerge(1); + DirectoryReader r = DirectoryReader.open(w); + IndexSearcher s = new IndexSearcher(r); + assertEquals(1250, s.count(LongPoint.newRangeQuery("long", new long[] {33640828, 33673327}, new long[] {Long.MIN_VALUE, Long.MAX_VALUE}))); + assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE); + r.close(); + w.close(); + System.out.println("TEST: now CheckIndex"); + TestUtil.checkIndex(dir); + dir.close(); + } +} From 9f8fe1239afb7089b9f85432d076bdd778d3cd50 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 9 Mar 2016 10:07:15 -0500 Subject: [PATCH 0078/1113] LUCENE-7085: PointRangeQuery.equals sometimes returns false even if queries were in fact equal --- .../apache/lucene/search/PointInSetQuery.java | 10 +-- .../apache/lucene/search/PointRangeQuery.java | 38 ++++++--- .../lucene/search/TestPointQueries.java | 80 +++++++++++++++++++ 
.../lucene/document/InetAddressPoint.java | 23 +++++- .../lucene/document/TestBigIntegerPoint.java | 15 ++++ .../lucene/document/TestInetAddressPoint.java | 21 +++++ .../lucene/document/TestLatLonPoint.java | 20 +++++ .../spatial3d/PointInGeo3DShapeQuery.java | 4 +- .../lucene/spatial3d/TestGeo3DPoint.java | 14 ++++ 9 files changed, 202 insertions(+), 23 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java index 944fadfb2bb2..bee864f8966d 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointInSetQuery.java @@ -103,7 +103,7 @@ protected PointInSetQuery(String field, int numDims, int bytesPerDim, Stream pac } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs". 
// This is an inverted structure and should be used in the first pass: @@ -161,14 +161,12 @@ private class MergePointVisitor implements IntersectVisitor { private final DocIdSetBuilder result; private TermIterator iterator; private BytesRef nextQueryPoint; - private final byte[] lastMaxPackedValue; private final BytesRef scratch = new BytesRef(); private final PrefixCodedTerms sortedPackedPoints; public MergePointVisitor(PrefixCodedTerms sortedPackedPoints, DocIdSetBuilder result) throws IOException { this.result = result; this.sortedPackedPoints = sortedPackedPoints; - lastMaxPackedValue = new byte[bytesPerDim]; scratch.length = bytesPerDim; this.iterator = sortedPackedPoints.iterator(); nextQueryPoint = iterator.next(); @@ -304,7 +302,7 @@ public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { } @Override - public int hashCode() { + public final int hashCode() { int hash = super.hashCode(); hash = 31 * hash + sortedPackedPointsHashCode; hash = 31 * hash + numDims; @@ -313,7 +311,7 @@ public int hashCode() { } @Override - public boolean equals(Object other) { + public final boolean equals(Object other) { if (super.equals(other)) { final PointInSetQuery q = (PointInSetQuery) other; return q.numDims == numDims && @@ -326,7 +324,7 @@ public boolean equals(Object other) { } @Override - public String toString(String field) { + public final String toString(String field) { final StringBuilder sb = new StringBuilder(); if (this.field.equals(field) == false) { sb.append(this.field); diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index ebbe7e2fbd6a..9384d235d3bc 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -109,7 +109,7 @@ public static void checkArgs(String field, Object lowerPoint, Object upperPoint) } @Override - public Weight 
createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs". // This is an inverted structure and should be used in the first pass: @@ -239,7 +239,7 @@ public Scorer scorer(LeafReaderContext context) throws IOException { } @Override - public int hashCode() { + public final int hashCode() { int hash = super.hashCode(); hash = 31 * hash + Arrays.hashCode(lowerPoint); hash = 31 * hash + Arrays.hashCode(upperPoint); @@ -249,20 +249,36 @@ public int hashCode() { } @Override - public boolean equals(Object other) { - if (super.equals(other)) { - final PointRangeQuery q = (PointRangeQuery) other; - return q.numDims == numDims && - q.bytesPerDim == bytesPerDim && - Arrays.equals(lowerPoint, q.lowerPoint) && - Arrays.equals(upperPoint, q.upperPoint); + public final boolean equals(Object other) { + if (super.equals(other) == false) { + return false; } - return false; + final PointRangeQuery q = (PointRangeQuery) other; + if (q.numDims != numDims) { + return false; + } + + if (q.bytesPerDim != bytesPerDim) { + return false; + } + + // Cannot use Arrays.equals here, because it in turn uses byte[].equals + // to compare each value, which only uses "==" + for(int dim=0;dim() { + @Override + public int compare(byte[] a, byte[] b) { + return StringHelper.compare(BYTES, a, 0, b, 0); + } + }); final BytesRef encoded = new BytesRef(new byte[BYTES]); @@ -230,7 +245,7 @@ public BytesRef next() { if (upto == sortedValues.length) { return null; } else { - encoded.bytes = encode(sortedValues[upto]); + encoded.bytes = sortedValues[upto]; assert encoded.bytes.length == encoded.length; upto++; return encoded; diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java index 
500c2a320612..8f38bcd1aa46 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestBigIntegerPoint.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; @@ -93,4 +94,18 @@ public void testToString() throws Exception { new BigInteger[] {BigInteger.valueOf(17), BigInteger.valueOf(42)}).toString()); assertEquals("field:{1}", BigIntegerPoint.newSetQuery("field", BigInteger.ONE).toString()); } + + public void testQueryEquals() throws Exception { + Query q = BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000)); + assertEquals(q, BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000))); + assertFalse(q.equals(BigIntegerPoint.newRangeQuery("a", BigInteger.valueOf(1), BigInteger.valueOf(1000)))); + + q = BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1000)); + assertEquals(q, BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1000))); + assertFalse(q.equals(BigIntegerPoint.newExactQuery("a", BigInteger.valueOf(1)))); + + q = BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(0), BigInteger.valueOf(1000), BigInteger.valueOf(17)); + assertEquals(q, BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(17), BigInteger.valueOf(0), BigInteger.valueOf(1000))); + assertFalse(q.equals(BigIntegerPoint.newSetQuery("a", BigInteger.valueOf(1), BigInteger.valueOf(17), BigInteger.valueOf(1000)))); + } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java index d4ddb3adbfde..c91b52b252fb 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java +++ 
b/lucene/sandbox/src/test/org/apache/lucene/document/TestInetAddressPoint.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; @@ -45,6 +46,7 @@ public void testBasics() throws Exception { assertEquals(1, searcher.count(InetAddressPoint.newPrefixQuery("field", address, 24))); assertEquals(1, searcher.count(InetAddressPoint.newRangeQuery("field", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")))); assertEquals(1, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.4")))); + assertEquals(1, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.4"), InetAddress.getByName("1.2.3.5")))); assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field", InetAddress.getByName("1.2.3.3")))); assertEquals(0, searcher.count(InetAddressPoint.newSetQuery("field"))); @@ -88,4 +90,23 @@ public void testToString() throws Exception { assertEquals("field:[fdc8:57ed:f042:ad1:0:0:0:0 TO fdc8:57ed:f042:ad1:ffff:ffff:ffff:ffff]", InetAddressPoint.newPrefixQuery("field", InetAddress.getByName("fdc8:57ed:f042:0ad1:f66d:4ff:fe90:ce0c"), 64).toString()); assertEquals("field:{fdc8:57ed:f042:ad1:f66d:4ff:fe90:ce0c}", InetAddressPoint.newSetQuery("field", InetAddress.getByName("fdc8:57ed:f042:0ad1:f66d:4ff:fe90:ce0c")).toString()); } + + public void testQueryEquals() throws Exception { + Query q = InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")); + assertEquals(q, InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5"))); + assertFalse(q.equals(InetAddressPoint.newRangeQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.7")))); + + q = 
InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.3"), 16); + assertEquals(q, InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.3"), 16)); + assertFalse(q.equals(InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.1.3.5"), 16))); + assertFalse(q.equals(InetAddressPoint.newPrefixQuery("a", InetAddress.getByName("1.2.3.5"), 24))); + + q = InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.3")); + assertEquals(q, InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.3"))); + assertFalse(q.equals(InetAddressPoint.newExactQuery("a", InetAddress.getByName("1.2.3.5")))); + + q = InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5")); + assertEquals(q, InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.5"))); + assertFalse(q.equals(InetAddressPoint.newSetQuery("a", InetAddress.getByName("1.2.3.3"), InetAddress.getByName("1.2.3.7")))); + } } diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java index 0ef948d3f6dd..61c6754c6542 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java +++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPoint.java @@ -19,6 +19,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; @@ -170,4 +171,23 @@ public void testEncodeDecodeIsStable() throws Exception { assertEquals(lonEnc, lonEnc2, 0.0); } } + + public void testQueryEquals() throws Exception { + Query q = LatLonPoint.newBoxQuery("field", 50, 70, -40, 20); + assertEquals(q, LatLonPoint.newBoxQuery("field", 50, 70, -40, 20)); + 
assertFalse(q.equals(LatLonPoint.newBoxQuery("field", 50, 70, -40, 10))); + + q = LatLonPoint.newDistanceQuery("field", 50, 70, 10000); + assertEquals(q, LatLonPoint.newDistanceQuery("field", 50, 70, 10000)); + assertFalse(q.equals(LatLonPoint.newDistanceQuery("field", 50, 70, 11000))); + assertFalse(q.equals(LatLonPoint.newDistanceQuery("field", 50, 60, 10000))); + + + double[] polyLats1 = new double[] {30, 40, 40, 30, 30}; + double[] polyLons1 = new double[] {90, 90, -40, -40, 90}; + double[] polyLats2 = new double[] {20, 40, 40, 20, 20}; + q = LatLonPoint.newPolygonQuery("field", polyLats1, polyLons1); + assertEquals(q, LatLonPoint.newPolygonQuery("field", polyLats1, polyLons1)); + assertFalse(q.equals(LatLonPoint.newPolygonQuery("field", polyLats2, polyLons1))); + } } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java index 9df8752b1353..c9b5e4e635bb 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/PointInGeo3DShapeQuery.java @@ -43,7 +43,7 @@ * * @lucene.experimental */ -class PointInGeo3DShapeQuery extends Query { +final class PointInGeo3DShapeQuery extends Query { final String field; final GeoShape shape; @@ -192,7 +192,7 @@ public boolean equals(Object o) { } @Override - public final int hashCode() { + public int hashCode() { int result = super.hashCode(); result = 31 * result + shape.hashCode(); return result; diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java index a4d8ed136316..3061b763f856 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java @@ -807,4 +807,18 @@ public void testShapeQueryToString() { 
private static Directory getDirectory() { return newDirectory(); } + + public void testEquals() { + GeoShape shape = randomShape(PlanetModel.WGS84); + Query q = Geo3DPoint.newShapeQuery("point", shape); + assertEquals(q, Geo3DPoint.newShapeQuery("point", shape)); + + // make a different random shape: + GeoShape shape2; + do { + shape2 = randomShape(PlanetModel.WGS84); + } while (shape.equals(shape2)); + + assertFalse(q.equals(Geo3DPoint.newShapeQuery("point", shape2))); + } } From ba0f63c0c25ca520b8816ac1ae803d7c7451dcca Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Mar 2016 10:06:11 -0500 Subject: [PATCH 0079/1113] LUCENE-7088, LUCENE-7075: Add PointRangeQueryBuilder to xml-queryparser to replace LegacyNumericRangeQueryBuilder --- .../lucene/queryparser/xml/CoreParser.java | 1 + .../LegacyNumericRangeQueryBuilder.java | 2 + .../xml/builders/PointRangeQueryBuilder.java | 95 +++++++++++++++++++ .../queryparser/xml/PointRangeQuery.xml | 31 ++++++ .../queryparser/xml/TestCoreParser.java | 7 ++ 5 files changed, 136 insertions(+) create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java create mode 100644 lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java index 6bbb626a7507..1416f255bdd4 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java @@ -67,6 +67,7 @@ protected CoreParser(String defaultField, Analyzer analyzer, QueryParser parser) queryFactory.addBuilder("MatchAllDocsQuery", new MatchAllDocsQueryBuilder()); queryFactory.addBuilder("BooleanQuery", new BooleanQueryBuilder(queryFactory)); queryFactory.addBuilder("LegacyNumericRangeQuery", new LegacyNumericRangeQueryBuilder()); + 
queryFactory.addBuilder("PointRangeQuery", new PointRangeQueryBuilder()); queryFactory.addBuilder("RangeQuery", new RangeQueryBuilder()); queryFactory.addBuilder("DisjunctionMaxQuery", new DisjunctionMaxQueryBuilder(queryFactory)); if (parser != null) { diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java index 2aba681347fd..e19596430e24 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java @@ -83,7 +83,9 @@ * A {@link ParserException} will be thrown if an error occurs parsing the * supplied lowerTerm or upperTerm into the numeric type * specified by type. + * @deprecated Index with points and use {@link PointRangeQueryBuilder} instead */ +@Deprecated public class LegacyNumericRangeQueryBuilder implements QueryBuilder { @Override diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java new file mode 100644 index 000000000000..45483168bacd --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/PointRangeQueryBuilder.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.xml.builders; + +import org.apache.lucene.search.Query; +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.queryparser.xml.DOMUtils; +import org.apache.lucene.queryparser.xml.ParserException; +import org.apache.lucene.queryparser.xml.QueryBuilder; +import org.w3c.dom.Element; + +/** + * Creates a range query across 1D {@link PointValues}. The table below specifies the required + * attributes and the defaults if optional attributes are omitted: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
    Attribute nameValuesRequiredDefault
    fieldNameStringYesN/A
    lowerTermSpecified by typeYesN/A
    upperTermSpecified by typeYesN/A
    typeint, long, float, doubleNoint
    + *

    + * A {@link ParserException} will be thrown if an error occurs parsing the + * supplied lowerTerm or upperTerm into the numeric type + * specified by type. + */ +public class PointRangeQueryBuilder implements QueryBuilder { + + @Override + public Query getQuery(Element e) throws ParserException { + String field = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName"); + String lowerTerm = DOMUtils.getAttributeOrFail(e, "lowerTerm"); + String upperTerm = DOMUtils.getAttributeOrFail(e, "upperTerm"); + + String type = DOMUtils.getAttribute(e, "type", "int"); + try { + if (type.equalsIgnoreCase("int")) { + return IntPoint.newRangeQuery(field, Integer.valueOf(lowerTerm), Integer.valueOf(upperTerm)); + } else if (type.equalsIgnoreCase("long")) { + return LongPoint.newRangeQuery(field, Long.valueOf(lowerTerm), Long.valueOf(upperTerm)); + } else if (type.equalsIgnoreCase("double")) { + return DoublePoint.newRangeQuery(field, Double.valueOf(lowerTerm), Double.valueOf(upperTerm)); + } else if (type.equalsIgnoreCase("float")) { + return FloatPoint.newRangeQuery(field, Float.valueOf(lowerTerm), Float.valueOf(upperTerm)); + } else { + throw new ParserException("type attribute must be one of: [long, int, double, float]"); + } + } catch (NumberFormatException nfe) { + throw new ParserException("Could not parse lowerTerm or upperTerm into a number", nfe); + } + } +} diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml new file mode 100644 index 000000000000..45af138b8dae --- /dev/null +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/PointRangeQuery.xml @@ -0,0 +1,31 @@ + + + + + merger + + + sumitomo + + + bank + + + + + diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java index e675723e320d..5cadec21f3f6 
100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -72,6 +73,7 @@ public static void beforeClass() throws Exception { doc.add(newTextField("date", date, Field.Store.YES)); doc.add(newTextField("contents", content, Field.Store.YES)); doc.add(new LegacyIntField("date2", Integer.valueOf(date), Field.Store.NO)); + doc.add(new IntPoint("date3", Integer.valueOf(date))); writer.addDocument(doc); line = d.readLine(); } @@ -164,6 +166,11 @@ public void testNumericRangeQueryXML() throws ParserException, IOException { Query q = parse("LegacyNumericRangeQuery.xml"); dumpResults("LegacyNumericRangeQuery", q, 5); } + + public void testPointRangeQuery() throws ParserException, IOException { + Query q = parse("PointRangeQuery.xml"); + dumpResults("PointRangeQuery", q, 5); + } //================= Helper methods =================================== From b9e204c07d6499256086422abd769235e4e1cda5 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 9 Mar 2016 11:03:19 -0500 Subject: [PATCH 0080/1113] CheckIndex failed to say it was checking points --- .../org/apache/lucene/index/CheckIndex.java | 7 +++++ .../index/TestAllFilesCheckIndexHeader.java | 3 +- .../index/TestAllFilesDetectTruncation.java | 4 +-- .../apache/lucene/index/TestPointValues.java | 29 +++++++++++++++++++ .../lucene/index/TestSwappedIndexFiles.java | 3 +- .../lucene/store/MockDirectoryWrapper.java | 2 +- .../java/org/apache/lucene/util/TestUtil.java | 14 +++++---- 7 files changed, 48 insertions(+), 14 deletions(-) diff --git 
a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index 3c437c1c5084..89b36efa2d32 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -1683,9 +1683,14 @@ public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStrea * @lucene.experimental */ public static Status.PointsStatus testPoints(CodecReader reader, PrintStream infoStream, boolean failFast) throws IOException { + if (infoStream != null) { + infoStream.print(" test: points.............."); + } + long startNS = System.nanoTime(); FieldInfos fieldInfos = reader.getFieldInfos(); Status.PointsStatus status = new Status.PointsStatus(); try { + if (fieldInfos.hasPointValues()) { PointsReader values = reader.getPointsReader(); if (values == null) { @@ -1840,6 +1845,8 @@ private void checkPackedValue(String desc, byte[] packedValue, int docID) { } } } + msg(infoStream, String.format(Locale.ROOT, "OK [%d fields, %d points] [took %.3f sec]", status.totalValueFields, status.totalValuePoints, nsToSec(System.nanoTime()-startNS))); + } catch (Throwable e) { if (failFast) { IOUtils.reThrow(e); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java index 68b7cc23ea58..f6c1486bc433 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; import org.apache.lucene.util.LuceneTestCase; @@ -132,7 +131,7 @@ 
private void checkOneFile(Directory dir, String victim) throws IOException { // CheckIndex should also fail: try { - TestUtil.checkIndex(dirCopy, true, true); + TestUtil.checkIndex(dirCopy, true, true, null); fail("wrong bytes not detected after randomizing first " + wrongBytes + " bytes out of " + victimLength + " for file " + victim); } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) { // expected diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java index 16caae3baa54..c751417cd742 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java @@ -19,7 +19,6 @@ import java.io.EOFException; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import org.apache.lucene.analysis.MockAnalyzer; @@ -28,7 +27,6 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; import org.apache.lucene.util.LuceneTestCase; @@ -116,7 +114,7 @@ private void truncateOneFile(Directory dir, String victim) throws IOException { // CheckIndex should also fail: try { - TestUtil.checkIndex(dirCopy, true, true); + TestUtil.checkIndex(dirCopy, true, true, null); fail("truncation not detected after removing " + lostBytes + " bytes out of " + victimLength + " for file " + victim); } catch (CorruptIndexException | EOFException e) { // expected diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index 55d47944d302..9b18f0298ee3 100644 --- 
a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -17,6 +17,7 @@ package org.apache.lucene.index; +import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.lucene.analysis.MockAnalyzer; @@ -40,6 +41,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -628,4 +630,31 @@ public void testSparsePoints() throws Exception { w.close(); dir.close(); } + + public void testCheckIndexIncludesPoints() throws Exception { + Directory dir = new RAMDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + Document doc = new Document(); + doc.add(new IntPoint("int1", 17)); + w.addDocument(doc); + + doc = new Document(); + doc.add(new IntPoint("int1", 44)); + doc.add(new IntPoint("int2", -17)); + w.addDocument(doc); + w.close(); + + ByteArrayOutputStream output = new ByteArrayOutputStream(); + CheckIndex.Status status = TestUtil.checkIndex(dir, false, true, output); + assertEquals(1, status.segmentInfos.size()); + CheckIndex.Status.SegmentInfoStatus segStatus = status.segmentInfos.get(0); + // total 3 point values were index: + assertEquals(3, segStatus.pointsStatus.totalValuePoints); + // ... across 2 fields: + assertEquals(2, segStatus.pointsStatus.totalValueFields); + + // Make sure CheckIndex in fact declares that it is testing points! 
+ assertTrue(output.toString(IOUtils.UTF_8).contains("test: points...")); + dir.close(); + } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java index a284fdd9d100..d2205020d99f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; import org.apache.lucene.util.LuceneTestCase; @@ -118,7 +117,7 @@ private void swapOneFile(Directory dir1, Directory dir2, String victim) throws I // CheckIndex should also fail: try { - TestUtil.checkIndex(dirCopy, true, true); + TestUtil.checkIndex(dirCopy, true, true, null); fail("wrong file " + victim + " not detected"); } catch (CorruptIndexException | EOFException | IndexFormatTooOldException e) { // expected diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java index 7fe7c3b0d5a3..c2544b4ffba0 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java +++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java @@ -850,7 +850,7 @@ public synchronized void close() throws IOException { System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex"); } - TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true); + TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true, null); } // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles diff --git 
a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java index 5e328ba13661..d772ae321d3f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java @@ -279,28 +279,30 @@ public static CheckIndex.Status checkIndex(Directory dir) throws IOException { } public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors) throws IOException { - return checkIndex(dir, crossCheckTermVectors, false); + return checkIndex(dir, crossCheckTermVectors, false, null); } /** If failFast is true, then throw the first exception when index corruption is hit, instead of moving on to other fields/segments to * look for any other corruption. */ - public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors, boolean failFast) throws IOException { - ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); + public static CheckIndex.Status checkIndex(Directory dir, boolean crossCheckTermVectors, boolean failFast, ByteArrayOutputStream output) throws IOException { + if (output == null) { + output = new ByteArrayOutputStream(1024); + } // TODO: actually use the dir's locking, unless test uses a special method? // some tests e.g. 
exception tests become much more complicated if they have to close the writer try (CheckIndex checker = new CheckIndex(dir, NoLockFactory.INSTANCE.obtainLock(dir, "bogus"))) { checker.setCrossCheckTermVectors(crossCheckTermVectors); checker.setFailFast(failFast); - checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), false); + checker.setInfoStream(new PrintStream(output, false, IOUtils.UTF_8), false); CheckIndex.Status indexStatus = checker.checkIndex(null); if (indexStatus == null || indexStatus.clean == false) { System.out.println("CheckIndex failed"); - System.out.println(bos.toString(IOUtils.UTF_8)); + System.out.println(output.toString(IOUtils.UTF_8)); throw new RuntimeException("CheckIndex failed"); } else { if (LuceneTestCase.INFOSTREAM) { - System.out.println(bos.toString(IOUtils.UTF_8)); + System.out.println(output.toString(IOUtils.UTF_8)); } return indexStatus; } From 94d333983e5a57d1aae7278c6f2c030c6d401dcf Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 9 Mar 2016 11:25:00 -0500 Subject: [PATCH 0081/1113] add missing argument --- .../test/org/apache/lucene/index/TestMaxPositionInOldIndex.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java index 87625d206742..3608b8067e75 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java @@ -82,7 +82,7 @@ public void testCorruptIndex() throws Exception { BaseDirectoryWrapper dir = newFSDirectory(path); dir.setCheckIndexOnClose(false); RuntimeException expected = expectThrows(RuntimeException.class, () -> { - TestUtil.checkIndex(dir, false, true); + TestUtil.checkIndex(dir, false, true, null); }); assertTrue(expected.getMessage().contains("pos 2147483647 > 
IndexWriter.MAX_POSITION=2147483519")); From 5a43a3e772e47bb086db3c1d635e9b17e28a39f7 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Wed, 9 Mar 2016 16:00:15 +0000 Subject: [PATCH 0082/1113] LUCENE-7084: fail precommit on comparingIdentical. also replaces one assert in the SingletonSortedSetDocValues constructor with equivalent test (TestSortedSetDocValues.testNoMoreOrdsConstant). --- .../index/SingletonSortedSetDocValues.java | 1 - .../lucene/index/TestSortedSetDocValues.java | 27 +++++++++++++++++++ lucene/tools/javadoc/ecj.javadocs.prefs | 2 +- 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java diff --git a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java index c90fcc5fafd0..5077cd843202 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/SingletonSortedSetDocValues.java @@ -33,7 +33,6 @@ final class SingletonSortedSetDocValues extends RandomAccessOrds { /** Creates a multi-valued view over the provided SortedDocValues */ public SingletonSortedSetDocValues(SortedDocValues in) { this.in = in; - assert NO_MORE_ORDS == -1; // this allows our nextOrd() to work for missing values without a check } /** Return the wrapped {@link SortedDocValues} */ diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java new file mode 100644 index 000000000000..8cffeaf00a65 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/TestSortedSetDocValues.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.index; + +import org.apache.lucene.util.LuceneTestCase; + +public class TestSortedSetDocValues extends LuceneTestCase { + + public void testNoMoreOrdsConstant() { + assertEquals(SortedSetDocValues.NO_MORE_ORDS, -1); + } + +} diff --git a/lucene/tools/javadoc/ecj.javadocs.prefs b/lucene/tools/javadoc/ecj.javadocs.prefs index d01148c602fa..bd98c99ac621 100644 --- a/lucene/tools/javadoc/ecj.javadocs.prefs +++ b/lucene/tools/javadoc/ecj.javadocs.prefs @@ -11,7 +11,7 @@ org.eclipse.jdt.core.compiler.doc.comment.support=enabled org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.autoboxing=ignore -org.eclipse.jdt.core.compiler.problem.comparingIdentical=ignore +org.eclipse.jdt.core.compiler.problem.comparingIdentical=error org.eclipse.jdt.core.compiler.problem.deadCode=ignore org.eclipse.jdt.core.compiler.problem.deprecation=ignore org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled From 42361a68bc27266d8f50e90b85ffd980ac953f36 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Mar 2016 11:25:37 -0500 Subject: [PATCH 0083/1113] LUCENE-7079: add newSetQuery(String, Collection) to primitive Point types --- 
.../org/apache/lucene/document/DoublePoint.java | 16 ++++++++++++++++ .../org/apache/lucene/document/FloatPoint.java | 16 ++++++++++++++++ .../org/apache/lucene/document/IntPoint.java | 16 ++++++++++++++++ .../org/apache/lucene/document/LongPoint.java | 16 ++++++++++++++++ .../apache/lucene/search/TestPointQueries.java | 8 ++++++++ .../apache/lucene/search/join/TestBlockJoin.java | 7 +------ 6 files changed, 73 insertions(+), 6 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java index 26ac0ced6f10..1133b2213938 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java @@ -17,6 +17,7 @@ package org.apache.lucene.document; import java.util.Arrays; +import java.util.Collection; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; @@ -247,4 +248,19 @@ protected String toString(byte[] value) { } }; } + + /** + * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. + * + * @param field field name. must not be {@code null}. 
+ * @param values all values to match + */ + public static Query newSetQuery(String field, Collection values) { + Double[] boxed = values.toArray(new Double[0]); + double[] unboxed = new double[boxed.length]; + for (int i = 0; i < boxed.length; i++) { + unboxed[i] = boxed[i]; + } + return newSetQuery(field, unboxed); + } } diff --git a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java index c58881ec8800..3d110db2cd86 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java @@ -17,6 +17,7 @@ package org.apache.lucene.document; import java.util.Arrays; +import java.util.Collection; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; @@ -247,4 +248,19 @@ protected String toString(byte[] value) { } }; } + + /** + * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. + * + * @param field field name. must not be {@code null}. 
+ * @param values all values to match + */ + public static Query newSetQuery(String field, Collection values) { + Float[] boxed = values.toArray(new Float[0]); + float[] unboxed = new float[boxed.length]; + for (int i = 0; i < boxed.length; i++) { + unboxed[i] = boxed[i]; + } + return newSetQuery(field, unboxed); + } } diff --git a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java index cb8315f2f205..53ae3d3dd2ac 100644 --- a/lucene/core/src/java/org/apache/lucene/document/IntPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java @@ -17,6 +17,7 @@ package org.apache.lucene.document; import java.util.Arrays; +import java.util.Collection; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; @@ -247,4 +248,19 @@ protected String toString(byte[] value) { } }; } + + /** + * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. + * + * @param field field name. must not be {@code null}. 
+ * @param values all values to match + */ + public static Query newSetQuery(String field, Collection values) { + Integer[] boxed = values.toArray(new Integer[0]); + int[] unboxed = new int[boxed.length]; + for (int i = 0; i < boxed.length; i++) { + unboxed[i] = boxed[i]; + } + return newSetQuery(field, unboxed); + } } diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java index ff78132d7b66..c4fd8875205b 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java @@ -17,6 +17,7 @@ package org.apache.lucene.document; import java.util.Arrays; +import java.util.Collection; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.PointInSetQuery; @@ -247,4 +248,19 @@ protected String toString(byte[] value) { } }; } + + /** + * Create a query matching any of the specified 1D values. This is the points equivalent of {@code TermsQuery}. + * + * @param field field name. must not be {@code null}. 
+ * @param values all values to match + */ + public static Query newSetQuery(String field, Collection values) { + Long[] boxed = values.toArray(new Long[0]); + long[] unboxed = new long[boxed.length]; + for (int i = 0; i < boxed.length; i++) { + unboxed[i] = boxed[i]; + } + return newSetQuery(field, unboxed); + } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java index 4d9aa593f061..c72ab44ac398 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java @@ -1602,6 +1602,14 @@ public void testBasicPointInSetQuery() throws Exception { r.close(); dir.close(); } + + /** Boxed methods for primitive types should behave the same as unboxed: just sugar */ + public void testPointIntSetBoxed() throws Exception { + assertEquals(IntPoint.newSetQuery("foo", 1, 2, 3), IntPoint.newSetQuery("foo", Arrays.asList(1, 2, 3))); + assertEquals(FloatPoint.newSetQuery("foo", 1F, 2F, 3F), FloatPoint.newSetQuery("foo", Arrays.asList(1F, 2F, 3F))); + assertEquals(LongPoint.newSetQuery("foo", 1L, 2L, 3L), LongPoint.newSetQuery("foo", Arrays.asList(1L, 2L, 3L))); + assertEquals(DoublePoint.newSetQuery("foo", 1D, 2D, 3D), DoublePoint.newSetQuery("foo", Arrays.asList(1D, 2D, 3D))); + } public void testBasicMultiValuedPointInSetQuery() throws Exception { Directory dir = newDirectory(); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index b5f2038a0dcf..9c39299c4b4f 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -615,12 +615,7 @@ public void testRandom() throws Exception { } if (!toDelete.isEmpty()) { - // TODO: we should add newSetQuery(String, Collection) ? this is awkward. 
- int[] array = new int[toDelete.size()]; - for (int i = 0; i < toDelete.size(); i++) { - array[i] = toDelete.get(i); - } - Query query = IntPoint.newSetQuery("blockID", array); + Query query = IntPoint.newSetQuery("blockID", toDelete); w.deleteDocuments(query); joinW.deleteDocuments(query); } From 4e911f2d3a029ae30dad9ea5ffb42530398adcbc Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Mar 2016 12:16:59 -0500 Subject: [PATCH 0084/1113] LUCENE-7076: improve deprecation message for LegacyNumericRangeQuery --- .../lucene/search/LegacyNumericRangeQuery.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java index c61f7a823bfe..fe6c9e24864e 100644 --- a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java @@ -21,9 +21,14 @@ import java.util.LinkedList; import java.util.Objects; +import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType.LegacyNumericType; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.FilteredTermsEnum; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; @@ -158,7 +163,12 @@ * precision step). This query type was developed for a geographic portal, where the performance for * e.g. bounding boxes or exact date/time stamps is important.

    * - * @deprecated Please use {@link PointRangeQuery} instead + * @deprecated Instead index with {@link IntPoint}, {@link LongPoint}, {@link FloatPoint}, {@link DoublePoint}, and + * create range queries with {@link IntPoint#newRangeQuery(String, int, int) IntPoint.newRangeQuery()}, + * {@link LongPoint#newRangeQuery(String, long, long) LongPoint.newRangeQuery()}, + * {@link FloatPoint#newRangeQuery(String, float, float) FloatPoint.newRangeQuery()}, + * {@link DoublePoint#newRangeQuery(String, double, double) DoublePoint.newRangeQuery()} respectively. + * See {@link PointValues} for background information on Points. * * @since 2.9 **/ From 6b2f3638969e872c704e3d192caec07ad7ef99ed Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 9 Mar 2016 17:38:07 +0000 Subject: [PATCH 0085/1113] SOLR-8765: Set parameters correctly in async shard requests --- .../solr/client/solrj/request/CollectionAdminRequest.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java index 4f28408ce210..76eb19f76409 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java @@ -203,6 +203,8 @@ protected abstract static class AsyncShardSpecificAdminRequest extends AsyncColl public AsyncShardSpecificAdminRequest(CollectionAction action, String collection, String shard) { super(action); + this.collection = collection; + this.shard = shard; } @Deprecated From a574f4724d4b963cdac1dcc1c36afe6715956125 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Wed, 9 Mar 2016 14:10:10 -0500 Subject: [PATCH 0086/1113] don't use slow composite wrapper in these tests --- .../lucene/search/join/TestBlockJoin.java | 18 +++++++++--------- .../lucene/queryparser/xml/TestCoreParser.java | 2 +- 2 files changed, 10 
insertions(+), 10 deletions(-) diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index 9c39299c4b4f..01a81351511f 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -176,7 +176,7 @@ public void testSimple() throws Exception { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Create a filter that defines "parent" documents in the index - in this case resumes BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); @@ -263,7 +263,7 @@ public void testBugCausedByRewritingTwice() throws IOException { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Hacky: this causes the query to need 2 rewrite // iterations: @@ -336,7 +336,7 @@ public void testSimpleFilter() throws Exception { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Create a filter that defines "parent" documents in the index - in this case resumes BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); @@ -640,7 +640,7 @@ public void testRandom() throws Exception { } } - final IndexSearcher s = newSearcher(r); + final IndexSearcher s = newSearcher(r, false); final IndexSearcher joinS = new IndexSearcher(joinR); @@ -1051,7 +1051,7 @@ public void testMultiChildTypes() throws Exception { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Create a filter that defines "parent" documents in the index - in this case resumes BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", 
"resume"))); @@ -1575,7 +1575,7 @@ public void testParentScoringBug() throws Exception { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Create a filter that defines "parent" documents in the index - in this case resumes BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); @@ -1613,7 +1613,7 @@ public void testToChildBlockJoinQueryExplain() throws Exception { IndexReader r = w.getReader(); w.close(); - IndexSearcher s = newSearcher(r); + IndexSearcher s = newSearcher(r, false); // Create a filter that defines "parent" documents in the index - in this case resumes BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); @@ -1644,7 +1644,7 @@ public void testToChildInitialAdvanceParentButNoKids() throws Exception { w.forceMerge(1); final IndexReader r = w.getReader(); - final IndexSearcher s = newSearcher(r); + final IndexSearcher s = newSearcher(r, false); w.close(); BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); @@ -1690,7 +1690,7 @@ public void testMultiChildQueriesOfDiffParentLevels() throws Exception { } final IndexReader r = w.getReader(); - final IndexSearcher s = newSearcher(r); + final IndexSearcher s = newSearcher(r, false); w.close(); BitSetProducer resumeFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume"))); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java index 5cadec21f3f6..c8b357ed30e4 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java @@ -80,7 +80,7 @@ public static void beforeClass() throws Exception { d.close(); writer.close(); reader = 
DirectoryReader.open(dir); - searcher = newSearcher(reader); + searcher = newSearcher(reader, false); } From a7cec361654c20153ac4fdd9ede9914422e34a47 Mon Sep 17 00:00:00 2001 From: Erick Erickson Date: Wed, 9 Mar 2016 11:40:44 -0800 Subject: [PATCH 0087/1113] SOLR-8813: Add test for MultiValued fields being returned in the correct order --- .../conf/schema-non-stored-docvalues.xml | 131 +++++++++--------- .../solr/schema/TestUseDocValuesAsStored.java | 60 +++++++- 2 files changed, 126 insertions(+), 65 deletions(-) diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml index aab4da4e2e87..f700e60a4169 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml @@ -35,70 +35,73 @@ 1.6: useDocValuesAsStored defaults to true. --> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java index 46f76ab8deca..839121acd3c1 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java +++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java @@ -76,7 +76,7 @@ public String getCoreName() { @Test public void testOnEmptyIndex() throws Exception { - assertU(delQ("*:*")); + clearIndex(); assertU(commit()); assertJQ(req("q", "*:*"), "/response/numFound==0"); assertJQ(req("q", "*:*", "fl", "*"), "/response/numFound==0"); @@ -238,4 +238,62 @@ private void doTest(String desc, String field, String type, String... 
value) { assertQ(desc + ": " + fl, req("q", "*:*", "fl", fl), xpaths); } + + // See SOLR-8740 for a discussion. This test is here to make sure we consciously change behavior of multiValued + // fields given that we can now return docValues fields. The behavior we've guaranteed in the past is that if + // multiValued fields are stored, they're returned in the document in the order they were added. + // There are four new fieldTypes added: + // + // + // + // + // + // If any of these tests break as a result of returning DocValues rather than stored values, make sure we reach some + // consensus that any breaks on back-compat are A Good Thing and that that behavior is carefully documented! + + @Test + public void testMultivaluedOrdering() throws Exception { + clearIndex(); + + // multiValued=true, docValues=true, stored=true. Should return in original order + assertU(adoc("id", "1", "test_mvt_dvt_st_str", "cccc", "test_mvt_dvt_st_str", "aaaa", "test_mvt_dvt_st_str", "bbbb")); + + // multiValued=true, docValues=true, stored=false. Should return in sorted order + assertU(adoc("id", "2", "test_mvt_dvt_sf_str", "cccc", "test_mvt_dvt_sf_str", "aaaa", "test_mvt_dvt_sf_str", "bbbb")); + + // multiValued=true, docValues=false, stored=true. Should return in original order + assertU(adoc("id", "3", "test_mvt_dvf_st_str", "cccc", "test_mvt_dvf_st_str", "aaaa", "test_mvt_dvf_st_str", "bbbb")); + + // multiValued=true, docValues=not specified, stored=true. Should return in original order + assertU(adoc("id", "4", "test_mvt_dvu_st_str", "cccc", "test_mvt_dvu_st_str", "aaaa", "test_mvt_dvu_st_str", "bbbb")); + + assertU(commit()); + + assertJQ(req("q", "id:1", "fl", "test_mvt_dvt_st_str"), + "/response/docs/[0]/test_mvt_dvt_st_str/[0]==cccc", + "/response/docs/[0]/test_mvt_dvt_st_str/[1]==aaaa", + "/response/docs/[0]/test_mvt_dvt_st_str/[2]==bbbb"); + + // Currently, this test fails since stored=false. 
When SOLR-8740 is committed, it should not throw an exception + // and should succeed, returning the field in sorted order. + try { + assertJQ(req("q", "id:2", "fl", "test_mvt_dvt_sf_str"), + "/response/docs/[0]/test_mvt_dvt_sf_str/[0]==aaaa", + "/response/docs/[0]/test_mvt_dvt_sf_str/[1]==bbbb", + "/response/docs/[0]/test_mvt_dvt_sf_str/[2]==cccc"); + } catch (Exception e) { + // do nothing until SOLR-8740 is committed. At that point this should not throw an exception. + // NOTE: I think the test is correct after 8740 so just remove the try/catch + } + assertJQ(req("q", "id:3", "fl", "test_mvt_dvf_st_str"), + "/response/docs/[0]/test_mvt_dvf_st_str/[0]==cccc", + "/response/docs/[0]/test_mvt_dvf_st_str/[1]==aaaa", + "/response/docs/[0]/test_mvt_dvf_st_str/[2]==bbbb"); + + assertJQ(req("q", "id:4", "fl", "test_mvt_dvu_st_str"), + "/response/docs/[0]/test_mvt_dvu_st_str/[0]==cccc", + "/response/docs/[0]/test_mvt_dvu_st_str/[1]==aaaa", + "/response/docs/[0]/test_mvt_dvu_st_str/[2]==bbbb"); + + } } From 8b408be7a2960a5f5a371cc6acb5af3e15b31344 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 9 Mar 2016 21:15:40 +0000 Subject: [PATCH 0088/1113] SOLR-8765: Throw SolrException rather than IAE on name validation --- .../client/solrj/util/SolrIdentifierValidator.java | 4 +++- .../solrj/request/TestCollectionAdminRequest.java | 9 +++++---- .../solr/client/solrj/request/TestCoreAdmin.java | 13 ++++++------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java index 57f9909f0514..9473a2806b67 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java @@ -19,6 +19,8 @@ import java.util.Locale; import java.util.regex.Pattern; +import org.apache.solr.common.SolrException; + 
/** * Ensures that provided identifiers align with Solr's recommendations/requirements for choosing * collection, core, etc identifiers. @@ -34,7 +36,7 @@ public enum IdentifierType { public static String validateName(IdentifierType type, String name) { if (!validateIdentifier(name)) - throw new IllegalArgumentException(getIdentifierMessage(type, name)); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, getIdentifierMessage(type, name)); return name; } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java index 5d5c31517bd0..c21e523855c0 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCollectionAdminRequest.java @@ -20,6 +20,7 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create; import org.apache.solr.client.solrj.request.CollectionAdminRequest.CreateAlias; import org.apache.solr.client.solrj.request.CollectionAdminRequest.CreateShard; +import org.apache.solr.common.SolrException; import org.junit.Test; /** @@ -33,7 +34,7 @@ public void testInvalidCollectionNameRejectedWhenCreatingCollection() { try { createRequest.setCollectionName("invalid$collection@name"); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(exceptionMessage.contains("Invalid collection")); assertTrue(exceptionMessage.contains("invalid$collection@name")); @@ -47,7 +48,7 @@ public void testInvalidShardNamesRejectedWhenCreatingCollection() { try { createRequest.setShards("invalid$shard@name"); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(exceptionMessage.contains("Invalid shard")); 
assertTrue(exceptionMessage.contains("invalid$shard@name")); @@ -61,7 +62,7 @@ public void testInvalidAliasNameRejectedWhenCreatingAlias() { try { createAliasRequest.setAliasName("invalid$alias@name"); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(exceptionMessage.contains("Invalid alias")); assertTrue(exceptionMessage.contains("invalid$alias@name")); @@ -75,7 +76,7 @@ public void testInvalidShardNameRejectedWhenCreatingShard() { try { createShardRequest.setShardName("invalid$shard@name"); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(exceptionMessage.contains("Invalid shard")); assertTrue(exceptionMessage.contains("invalid$shard@name")); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java index f3c3d55d5fcd..ef4dad7b6622 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java @@ -16,12 +16,11 @@ */ package org.apache.solr.client.solrj.request; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.core.Is.is; - import java.io.File; import java.lang.invoke.MethodHandles; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrIgnoredThreadsFilter; @@ -43,8 +42,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import static org.hamcrest.CoreMatchers.notNullValue; 
+import static org.hamcrest.core.Is.is; @ThreadLeakFilters(defaultFilters = true, filters = {SolrIgnoredThreadsFilter.class}) public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase { @@ -167,7 +166,7 @@ public void testInvalidCoreNamesAreRejectedWhenCreatingCore() { try { createRequest.setCoreName("invalid$core@name"); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(exceptionMessage.contains("Invalid core")); assertTrue(exceptionMessage.contains("invalid$core@name")); @@ -180,7 +179,7 @@ public void testInvalidCoreNamesAreRejectedWhenRenamingExistingCore() throws Exc try { CoreAdminRequest.renameCore("validExistingCoreName", "invalid$core@name", null); fail(); - } catch (IllegalArgumentException e) { + } catch (SolrException e) { final String exceptionMessage = e.getMessage(); assertTrue(e.getMessage(), exceptionMessage.contains("Invalid core")); assertTrue(exceptionMessage.contains("invalid$core@name")); From c8543efacf43dafa76214a96d47876f12c3cf666 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Mar 2016 21:05:26 -0500 Subject: [PATCH 0089/1113] LUCENE-7089, LUCENE-7075: add points to flexible queryparser to replace legacy numerics support --- .../flexible/core/nodes/package-info.java | 4 +- .../standard/StandardQueryParser.java | 29 +++- ...> LegacyNumericRangeQueryNodeBuilder.java} | 26 +-- .../builders/PointRangeQueryNodeBuilder.java | 137 ++++++++++++++++ .../builders/StandardQueryTreeBuilder.java | 12 +- ...icConfig.java => LegacyNumericConfig.java} | 16 +- ... 
=> LegacyNumericFieldConfigListener.java} | 26 +-- .../standard/config/PointsConfig.java | 124 ++++++++++++++ .../standard/config/PointsConfigListener.java | 65 ++++++++ .../config/StandardQueryConfigHandler.java | 39 +++-- .../nodes/LegacyNumericQueryNode.java | 153 ++++++++++++++++++ ....java => LegacyNumericRangeQueryNode.java} | 40 ++--- ...ericQueryNode.java => PointQueryNode.java} | 12 +- .../standard/nodes/PointRangeQueryNode.java | 124 ++++++++++++++ ...a => LegacyNumericQueryNodeProcessor.java} | 38 ++--- ...LegacyNumericRangeQueryNodeProcessor.java} | 38 ++--- .../processors/PointQueryNodeProcessor.java | 136 ++++++++++++++++ .../PointRangeQueryNodeProcessor.java | 148 +++++++++++++++++ .../StandardQueryNodeProcessorPipeline.java | 6 +- ...java => TestLegacyNumericQueryParser.java} | 12 +- .../standard/TestPointQueryParser.java | 82 ++++++++++ lucene/tools/junit4/cached-timehints.txt | 2 +- 22 files changed, 1148 insertions(+), 121 deletions(-) rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/{NumericRangeQueryNodeBuilder.java => LegacyNumericRangeQueryNodeBuilder.java} (75%) create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/{NumericConfig.java => LegacyNumericConfig.java} (90%) rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/{NumericFieldConfigListener.java => LegacyNumericFieldConfigListener.java} (67%) create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java rename 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/{NumericRangeQueryNode.java => LegacyNumericRangeQueryNode.java} (73%) rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/{NumericQueryNode.java => PointQueryNode.java} (91%) create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/{NumericQueryNodeProcessor.java => LegacyNumericQueryNodeProcessor.java} (78%) rename lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/{NumericRangeQueryNodeProcessor.java => LegacyNumericRangeQueryNodeProcessor.java} (81%) create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java create mode 100644 lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java rename lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/{TestNumericQueryParser.java => TestLegacyNumericQueryParser.java} (97%) create mode 100644 lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java index c9d55eab12c4..23c72a1ada9b 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/core/nodes/package-info.java @@ -52,14 +52,14 @@ *
  • FuzzyQueryNode - fuzzy node
  • *
  • TermRangeQueryNode - used for parametric field:[low_value TO high_value]
  • *
  • ProximityQueryNode - used for proximity search
  • - *
  • NumericRangeQueryNode - used for numeric range search
  • + *
  • LegacyNumericRangeQueryNode - used for numeric range search
  • *
  • TokenizedPhraseQueryNode - used by tokenizers/lemmatizers/analyzers for phrases/autophrases
  • * *

    * Leaf Nodes: *

      *
    • FieldQueryNode - field/value node
    • - *
    • NumericQueryNode - used for numeric search
    • + *
    • LegacyNumericQueryNode - used for numeric search
    • *
    • PathQueryNode - {@link org.apache.lucene.queryparser.flexible.core.nodes.QueryNode} object used with path-like queries
    • *
    • OpaqueQueryNode - Used as for part of the query that can be parsed by other parsers. schema/value
    • *
    • PrefixWildcardQueryNode - non-phrase wildcard query
    • diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java index ada65a4ca7af..2774cf091ff1 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/StandardQueryParser.java @@ -29,7 +29,8 @@ import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryTreeBuilder; import org.apache.lucene.queryparser.flexible.standard.config.FuzzyConfig; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; @@ -322,12 +323,30 @@ public void setFuzzyPrefixLength(int fuzzyPrefixLength) { } - public void setNumericConfigMap(Map numericConfigMap) { - getQueryConfigHandler().set(ConfigurationKeys.NUMERIC_CONFIG_MAP, numericConfigMap); + /** + * Sets field configuration for legacy numeric fields + * @deprecated Index with points instead and use {@link #setPointsConfigMap(Map)} + */ + @Deprecated + public void setLegacyNumericConfigMap(Map legacyNumericConfigMap) { + getQueryConfigHandler().set(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP, legacyNumericConfigMap); + } + + /** + * Gets field configuration for legacy numeric fields + * @deprecated Index with points instead and use {@link #getPointsConfigMap()} + */ + @Deprecated 
+ public Map getLegacyNumericConfigMap() { + return getQueryConfigHandler().get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP); + } + + public void setPointsConfigMap(Map pointsConfigMap) { + getQueryConfigHandler().set(ConfigurationKeys.POINTS_CONFIG_MAP, pointsConfigMap); } - public Map getNumericConfigMap() { - return getQueryConfigHandler().get(ConfigurationKeys.NUMERIC_CONFIG_MAP); + public Map getPointsConfigMap() { + return getQueryConfigHandler().get(ConfigurationKeys.POINTS_CONFIG_MAP); } /** diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java similarity index 75% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java index 6c8790ff8f06..8ae7d5e2e43f 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/NumericRangeQueryNodeBuilder.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java @@ -22,38 +22,40 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; import org.apache.lucene.queryparser.flexible.core.util.StringUtils; import org.apache.lucene.queryparser.flexible.messages.MessageImpl; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode; +import 
org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode; import org.apache.lucene.search.LegacyNumericRangeQuery; /** - * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link NumericRangeQueryNode}s. + * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s. * * @see org.apache.lucene.search.LegacyNumericRangeQuery - * @see NumericRangeQueryNode + * @see LegacyNumericRangeQueryNode + * @deprecated Index with points and use {@link PointRangeQueryNodeBuilder} instead. */ -public class NumericRangeQueryNodeBuilder implements StandardQueryBuilder { +@Deprecated +public class LegacyNumericRangeQueryNodeBuilder implements StandardQueryBuilder { /** - * Constructs a {@link NumericRangeQueryNodeBuilder} object. + * Constructs a {@link LegacyNumericRangeQueryNodeBuilder} object. */ - public NumericRangeQueryNodeBuilder() { + public LegacyNumericRangeQueryNodeBuilder() { // empty constructor } @Override public LegacyNumericRangeQuery build(QueryNode queryNode) throws QueryNodeException { - NumericRangeQueryNode numericRangeNode = (NumericRangeQueryNode) queryNode; + LegacyNumericRangeQueryNode numericRangeNode = (LegacyNumericRangeQueryNode) queryNode; - NumericQueryNode lowerNumericNode = numericRangeNode.getLowerBound(); - NumericQueryNode upperNumericNode = numericRangeNode.getUpperBound(); + LegacyNumericQueryNode lowerNumericNode = numericRangeNode.getLowerBound(); + LegacyNumericQueryNode upperNumericNode = numericRangeNode.getUpperBound(); Number lowerNumber = lowerNumericNode.getValue(); Number upperNumber = upperNumericNode.getValue(); - NumericConfig numericConfig = numericRangeNode.getNumericConfig(); + LegacyNumericConfig numericConfig = numericRangeNode.getNumericConfig(); FieldType.LegacyNumericType numberType = numericConfig.getType(); String field = StringUtils.toString(numericRangeNode.getField()); boolean minInclusive = 
numericRangeNode.isLowerInclusive(); diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java new file mode 100644 index 000000000000..0cce4bf94f2b --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/PointRangeQueryNodeBuilder.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.lucene.queryparser.flexible.standard.builders; + +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.util.StringUtils; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode; +import org.apache.lucene.search.Query; + +/** + * Builds {@link PointValues} range queries out of {@link PointRangeQueryNode}s. + * + * @see PointRangeQueryNode + */ +public class PointRangeQueryNodeBuilder implements StandardQueryBuilder { + + /** + * Constructs a {@link PointRangeQueryNodeBuilder} object. 
+ */ + public PointRangeQueryNodeBuilder() { + // empty constructor + } + + @Override + public Query build(QueryNode queryNode) throws QueryNodeException { + PointRangeQueryNode numericRangeNode = (PointRangeQueryNode) queryNode; + + PointQueryNode lowerNumericNode = numericRangeNode.getLowerBound(); + PointQueryNode upperNumericNode = numericRangeNode.getUpperBound(); + + Number lowerNumber = lowerNumericNode.getValue(); + Number upperNumber = upperNumericNode.getValue(); + + PointsConfig pointsConfig = numericRangeNode.getPointsConfig(); + Class numberType = pointsConfig.getType(); + String field = StringUtils.toString(numericRangeNode.getField()); + boolean minInclusive = numericRangeNode.isLowerInclusive(); + boolean maxInclusive = numericRangeNode.isUpperInclusive(); + + // TODO: push down cleaning up of crazy nulls and inclusive/exclusive elsewhere + if (Integer.class.equals(numberType)) { + Integer lower = (Integer) lowerNumber; + if (lower == null) { + lower = Integer.MIN_VALUE; + } + if (minInclusive == false) { + lower = lower + 1; + } + + Integer upper = (Integer) upperNumber; + if (upper == null) { + upper = Integer.MAX_VALUE; + } + if (maxInclusive == false) { + upper = upper - 1; + } + return IntPoint.newRangeQuery(field, lower, upper); + } else if (Long.class.equals(numberType)) { + Long lower = (Long) lowerNumber; + if (lower == null) { + lower = Long.MIN_VALUE; + } + if (minInclusive == false) { + lower = lower + 1; + } + + Long upper = (Long) upperNumber; + if (upper == null) { + upper = Long.MAX_VALUE; + } + if (maxInclusive == false) { + upper = upper - 1; + } + return LongPoint.newRangeQuery(field, lower, upper); + } else if (Float.class.equals(numberType)) { + Float lower = (Float) lowerNumber; + if (lower == null) { + lower = Float.NEGATIVE_INFINITY; + } + if (minInclusive == false) { + lower = Math.nextUp(lower); + } + + Float upper = (Float) upperNumber; + if (upper == null) { + upper = Float.POSITIVE_INFINITY; + } + if (maxInclusive == 
false) { + upper = Math.nextDown(upper); + } + return FloatPoint.newRangeQuery(field, lower, upper); + } else if (Double.class.equals(numberType)) { + Double lower = (Double) lowerNumber; + if (lower == null) { + lower = Double.NEGATIVE_INFINITY; + } + if (minInclusive == false) { + lower = Math.nextUp(lower); + } + + Double upper = (Double) upperNumber; + if (upper == null) { + upper = Double.POSITIVE_INFINITY; + } + if (maxInclusive == false) { + upper = Math.nextDown(upper); + } + return DoublePoint.newRangeQuery(field, lower, upper); + } else { + throw new QueryNodeException(new MessageImpl(QueryParserMessages.UNSUPPORTED_NUMERIC_DATA_TYPE, numberType)); + } + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java index 2d7c6434be86..360f6a7ff838 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/StandardQueryTreeBuilder.java @@ -30,8 +30,10 @@ import org.apache.lucene.queryparser.flexible.core.nodes.SlopQueryNode; import org.apache.lucene.queryparser.flexible.core.nodes.TokenizedPhraseQueryNode; import org.apache.lucene.queryparser.flexible.standard.nodes.MultiPhraseQueryNode; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode; import 
org.apache.lucene.queryparser.flexible.standard.nodes.PrefixWildcardQueryNode; import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode; import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode; @@ -57,8 +59,10 @@ public StandardQueryTreeBuilder() { setBuilder(FieldQueryNode.class, new FieldQueryNodeBuilder()); setBuilder(BooleanQueryNode.class, new BooleanQueryNodeBuilder()); setBuilder(FuzzyQueryNode.class, new FuzzyQueryNodeBuilder()); - setBuilder(NumericQueryNode.class, new DummyQueryNodeBuilder()); - setBuilder(NumericRangeQueryNode.class, new NumericRangeQueryNodeBuilder()); + setBuilder(LegacyNumericQueryNode.class, new DummyQueryNodeBuilder()); + setBuilder(LegacyNumericRangeQueryNode.class, new LegacyNumericRangeQueryNodeBuilder()); + setBuilder(PointQueryNode.class, new DummyQueryNodeBuilder()); + setBuilder(PointRangeQueryNode.class, new PointRangeQueryNodeBuilder()); setBuilder(BoostQueryNode.class, new BoostQueryNodeBuilder()); setBuilder(ModifierQueryNode.class, new ModifierQueryNodeBuilder()); setBuilder(WildcardQueryNode.class, new WildcardQueryNodeBuilder()); diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java similarity index 90% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java index c457a4ebc46b..985f55a616fd 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericConfig.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java @@ -28,8 +28,10 @@ * * @see org.apache.lucene.search.LegacyNumericRangeQuery * @see NumberFormat + * @deprecated Index 
with Points instead and use {@link PointsConfig} */ -public class NumericConfig { +@Deprecated +public class LegacyNumericConfig { private int precisionStep; @@ -38,7 +40,7 @@ public class NumericConfig { private FieldType.LegacyNumericType type; /** - * Constructs a {@link NumericConfig} object. + * Constructs a {@link LegacyNumericConfig} object. * * @param precisionStep * the precision used to index the numeric values @@ -48,11 +50,11 @@ public class NumericConfig { * @param type * the numeric type used to index the numeric values * - * @see NumericConfig#setPrecisionStep(int) - * @see NumericConfig#setNumberFormat(NumberFormat) + * @see LegacyNumericConfig#setPrecisionStep(int) + * @see LegacyNumericConfig#setNumberFormat(NumberFormat) * @see #setType(org.apache.lucene.document.FieldType.LegacyNumericType) */ - public NumericConfig(int precisionStep, NumberFormat format, + public LegacyNumericConfig(int precisionStep, NumberFormat format, LegacyNumericType type) { setPrecisionStep(precisionStep); setNumberFormat(format); @@ -141,8 +143,8 @@ public boolean equals(Object obj) { if (obj == this) return true; - if (obj instanceof NumericConfig) { - NumericConfig other = (NumericConfig) obj; + if (obj instanceof LegacyNumericConfig) { + LegacyNumericConfig other = (LegacyNumericConfig) obj; if (this.precisionStep == other.precisionStep && this.type == other.type diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java similarity index 67% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java index c28cf2c92dff..f2d312489f61 100644 --- 
a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/NumericFieldConfigListener.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericFieldConfigListener.java @@ -25,25 +25,27 @@ /** * This listener is used to listen to {@link FieldConfig} requests in - * {@link QueryConfigHandler} and add {@link ConfigurationKeys#NUMERIC_CONFIG} - * based on the {@link ConfigurationKeys#NUMERIC_CONFIG_MAP} set in the + * {@link QueryConfigHandler} and add {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} + * based on the {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG_MAP} set in the * {@link QueryConfigHandler}. * - * @see NumericConfig + * @see LegacyNumericConfig * @see QueryConfigHandler - * @see ConfigurationKeys#NUMERIC_CONFIG - * @see ConfigurationKeys#NUMERIC_CONFIG_MAP + * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG + * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG_MAP + * @deprecated Index with Points instead and use {@link PointsConfigListener} */ -public class NumericFieldConfigListener implements FieldConfigListener { +@Deprecated +public class LegacyNumericFieldConfigListener implements FieldConfigListener { final private QueryConfigHandler config; /** - * Construcs a {@link NumericFieldConfigListener} object using the given {@link QueryConfigHandler}. + * Constructs a {@link LegacyNumericFieldConfigListener} object using the given {@link QueryConfigHandler}. 
* * @param config the {@link QueryConfigHandler} it will listen too */ - public NumericFieldConfigListener(QueryConfigHandler config) { + public LegacyNumericFieldConfigListener(QueryConfigHandler config) { if (config == null) { throw new IllegalArgumentException("config cannot be null!"); @@ -55,15 +57,15 @@ public NumericFieldConfigListener(QueryConfigHandler config) { @Override public void buildFieldConfig(FieldConfig fieldConfig) { - Map numericConfigMap = config - .get(ConfigurationKeys.NUMERIC_CONFIG_MAP); + Map numericConfigMap = config + .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG_MAP); if (numericConfigMap != null) { - NumericConfig numericConfig = numericConfigMap + LegacyNumericConfig numericConfig = numericConfigMap .get(fieldConfig.getField()); if (numericConfig != null) { - fieldConfig.set(ConfigurationKeys.NUMERIC_CONFIG, numericConfig); + fieldConfig.set(ConfigurationKeys.LEGACY_NUMERIC_CONFIG, numericConfig); } } diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java new file mode 100644 index 000000000000..db59b48123a4 --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfig.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.flexible.standard.config; + +import java.text.NumberFormat; + +import org.apache.lucene.index.PointValues; + +/** + * This class holds the configuration used to parse numeric queries and create + * {@link PointValues} queries. + * + * @see PointValues + * @see NumberFormat + */ +public class PointsConfig { + + private NumberFormat format; + + private Class type; + + /** + * Constructs a {@link PointsConfig} object. + * + * @param format + * the {@link NumberFormat} used to parse a {@link String} to + * {@link Number} + * @param type + * the numeric type used to index the numeric values + * + * @see PointsConfig#setNumberFormat(NumberFormat) + */ + public PointsConfig(NumberFormat format, Class type) { + setNumberFormat(format); + setType(type); + } + + /** + * Returns the {@link NumberFormat} used to parse a {@link String} to + * {@link Number} + * + * @return the {@link NumberFormat} used to parse a {@link String} to + * {@link Number} + */ + public NumberFormat getNumberFormat() { + return format; + } + + /** + * Returns the numeric type used to index the numeric values + * + * @return the numeric type used to index the numeric values + */ + public Class getType() { + return type; + } + + /** + * Sets the numeric type used to index the numeric values + * + * @param type the numeric type used to index the numeric values + */ + public void setType(Class type) { + if (type == null) { + throw new IllegalArgumentException("type cannot be null!"); + } + if (Integer.class.equals(type) == false && + 
Long.class.equals(type) == false && + Float.class.equals(type) == false && + Double.class.equals(type) == false) { + throw new IllegalArgumentException("unsupported numeric type: " + type); + } + this.type = type; + } + + /** + * Sets the {@link NumberFormat} used to parse a {@link String} to + * {@link Number} + * + * @param format + * the {@link NumberFormat} used to parse a {@link String} to + * {@link Number}, cannot be null + */ + public void setNumberFormat(NumberFormat format) { + if (format == null) { + throw new IllegalArgumentException("format cannot be null!"); + } + this.format = format; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + format.hashCode(); + result = prime * result + type.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PointsConfig other = (PointsConfig) obj; + if (!format.equals(other.format)) return false; + if (!type.equals(other.type)) return false; + return true; + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java new file mode 100644 index 000000000000..9efbbb7e5a01 --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/PointsConfigListener.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.flexible.standard.config; + +import java.util.Map; + +import org.apache.lucene.queryparser.flexible.core.config.FieldConfig; +import org.apache.lucene.queryparser.flexible.core.config.FieldConfigListener; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; + +/** + * This listener is used to listen to {@link FieldConfig} requests in + * {@link QueryConfigHandler} and add {@link ConfigurationKeys#POINTS_CONFIG} + * based on the {@link ConfigurationKeys#POINTS_CONFIG_MAP} set in the + * {@link QueryConfigHandler}. + * + * @see PointsConfig + * @see QueryConfigHandler + * @see ConfigurationKeys#POINTS_CONFIG + * @see ConfigurationKeys#POINTS_CONFIG_MAP + */ +public class PointsConfigListener implements FieldConfigListener { + + final private QueryConfigHandler config; + + /** + * Constructs a {@link PointsConfigListener} object using the given {@link QueryConfigHandler}. 
+ * + * @param config the {@link QueryConfigHandler} it will listen to + */ + public PointsConfigListener(QueryConfigHandler config) { + if (config == null) { + throw new IllegalArgumentException("config cannot be null!"); + } + this.config = config; + } + + @Override + public void buildFieldConfig(FieldConfig fieldConfig) { + Map pointsConfigMap = config.get(ConfigurationKeys.POINTS_CONFIG_MAP); + + if (pointsConfigMap != null) { + PointsConfig pointsConfig = pointsConfigMap.get(fieldConfig.getField()); + + if (pointsConfig != null) { + fieldConfig.set(ConfigurationKeys.POINTS_CONFIG, pointsConfig); + } + } + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java index 77bd7bb639cb..bba95eed91f8 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/StandardQueryConfigHandler.java @@ -167,21 +167,41 @@ final public static class ConfigurationKeys { final public static ConfigurationKey BOOST = ConfigurationKey.newInstance(); /** - * Key used to set a field to its {@link NumericConfig}. + * Key used to set a field to its {@link LegacyNumericConfig}. 
* - * @see StandardQueryParser#setNumericConfigMap(Map) - * @see StandardQueryParser#getNumericConfigMap() + * @see StandardQueryParser#setLegacyNumericConfigMap(Map) + * @see StandardQueryParser#getLegacyNumericConfigMap() + * @deprecated Index with Points instead and use {@link #POINTS_CONFIG} */ - final public static ConfigurationKey NUMERIC_CONFIG = ConfigurationKey.newInstance(); + @Deprecated + final public static ConfigurationKey LEGACY_NUMERIC_CONFIG = ConfigurationKey.newInstance(); /** - * Key used to set the {@link NumericConfig} in {@link FieldConfig} for numeric fields. + * Key used to set the {@link LegacyNumericConfig} in {@link FieldConfig} for numeric fields. * - * @see StandardQueryParser#setNumericConfigMap(Map) - * @see StandardQueryParser#getNumericConfigMap() + * @see StandardQueryParser#setLegacyNumericConfigMap(Map) + * @see StandardQueryParser#getLegacyNumericConfigMap() + * @deprecated Index with Points instead and use {@link #POINTS_CONFIG_MAP} */ - final public static ConfigurationKey> NUMERIC_CONFIG_MAP = ConfigurationKey.newInstance(); + @Deprecated + final public static ConfigurationKey> LEGACY_NUMERIC_CONFIG_MAP = ConfigurationKey.newInstance(); + /** + * Key used to set a field to its {@link PointsConfig}. + * + * @see StandardQueryParser#setLegacyNumericConfigMap(Map) + * @see StandardQueryParser#getLegacyNumericConfigMap() + */ + final public static ConfigurationKey POINTS_CONFIG = ConfigurationKey.newInstance(); + + /** + * Key used to set the {@link PointsConfig} in {@link FieldConfig} for point fields. + * + * @see StandardQueryParser#setLegacyNumericConfigMap(Map) + * @see StandardQueryParser#getLegacyNumericConfigMap() + */ + final public static ConfigurationKey> POINTS_CONFIG_MAP = ConfigurationKey.newInstance(); + } /** @@ -195,7 +215,8 @@ public StandardQueryConfigHandler() { // Add listener that will build the FieldConfig. 
addFieldConfigListener(new FieldBoostMapFCListener(this)); addFieldConfigListener(new FieldDateResolutionFCListener(this)); - addFieldConfigListener(new NumericFieldConfigListener(this)); + addFieldConfigListener(new LegacyNumericFieldConfigListener(this)); + addFieldConfigListener(new PointsConfigListener(this)); // Default Values set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, false); // default in 2.9 diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java new file mode 100644 index 000000000000..b644d8a65f24 --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericQueryNode.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.lucene.queryparser.flexible.standard.nodes; + +import java.text.NumberFormat; +import java.util.Locale; + +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldValuePairQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax.Type; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; + +/** + * This query node represents a field query that holds a numeric value. It is + * similar to {@link FieldQueryNode}, however the {@link #getValue()} returns a + * {@link Number}. + * + * @see LegacyNumericConfig + * @deprecated Index with Points instead and use {@link PointQueryNode} instead. + */ +@Deprecated +public class LegacyNumericQueryNode extends QueryNodeImpl implements + FieldValuePairQueryNode { + + private NumberFormat numberFormat; + + private CharSequence field; + + private Number value; + + /** + * Creates a {@link LegacyNumericQueryNode} object using the given field, + * {@link Number} value and {@link NumberFormat} used to convert the value to + * {@link String}. + * + * @param field the field associated with this query node + * @param value the value hold by this node + * @param numberFormat the {@link NumberFormat} used to convert the value to {@link String} + */ + public LegacyNumericQueryNode(CharSequence field, Number value, + NumberFormat numberFormat) { + + super(); + + setNumberFormat(numberFormat); + setField(field); + setValue(value); + + } + + /** + * Returns the field associated with this node. + * + * @return the field associated with this node + */ + @Override + public CharSequence getField() { + return this.field; + } + + /** + * Sets the field associated with this node. 
+ * + * @param fieldName the field associated with this node + */ + @Override + public void setField(CharSequence fieldName) { + this.field = fieldName; + } + + /** + * This method is used to get the value converted to {@link String} and + * escaped using the given {@link EscapeQuerySyntax}. + * + * @param escaper the {@link EscapeQuerySyntax} used to escape the value {@link String} + * + * @return the value converted to {@link String} and escaped + */ + protected CharSequence getTermEscaped(EscapeQuerySyntax escaper) { + return escaper.escape(numberFormat.format(this.value), + Locale.ROOT, Type.NORMAL); + } + + @Override + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + if (isDefaultField(this.field)) { + return getTermEscaped(escapeSyntaxParser); + } else { + return this.field + ":" + getTermEscaped(escapeSyntaxParser); + } + } + + /** + * Sets the {@link NumberFormat} used to convert the value to {@link String}. + * + * @param format the {@link NumberFormat} used to convert the value to {@link String} + */ + public void setNumberFormat(NumberFormat format) { + this.numberFormat = format; + } + + /** + * Returns the {@link NumberFormat} used to convert the value to {@link String}. + * + * @return the {@link NumberFormat} used to convert the value to {@link String} + */ + public NumberFormat getNumberFormat() { + return this.numberFormat; + } + + /** + * Returns the numeric value as {@link Number}. + * + * @return the numeric value + */ + @Override + public Number getValue() { + return value; + } + + /** + * Sets the numeric value. 
+ * + * @param value the numeric value + */ + @Override + public void setValue(Number value) { + this.value = value; + } + + @Override + public String toString() { + return ""; + } + +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java similarity index 73% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java index c132aa1dd0d1..088ab98e175a 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericRangeQueryNode.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java @@ -21,34 +21,36 @@ import org.apache.lucene.queryparser.flexible.core.QueryNodeException; import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; import org.apache.lucene.queryparser.flexible.messages.MessageImpl; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; /** - * This query node represents a range query composed by {@link NumericQueryNode} + * This query node represents a range query composed by {@link LegacyNumericQueryNode} * bounds, which means the bound values are {@link Number}s. * - * @see NumericQueryNode + * @see LegacyNumericQueryNode * @see AbstractRangeQueryNode + * @deprecated Index with Points instead and use {@link PointRangeQueryNode} instead. 
*/ -public class NumericRangeQueryNode extends - AbstractRangeQueryNode { +@Deprecated +public class LegacyNumericRangeQueryNode extends + AbstractRangeQueryNode { - public NumericConfig numericConfig; + public LegacyNumericConfig numericConfig; /** - * Constructs a {@link NumericRangeQueryNode} object using the given - * {@link NumericQueryNode} as its bounds and {@link NumericConfig}. + * Constructs a {@link LegacyNumericRangeQueryNode} object using the given + * {@link LegacyNumericQueryNode} as its bounds and {@link LegacyNumericConfig}. * * @param lower the lower bound * @param upper the upper bound * @param lowerInclusive true if the lower bound is inclusive, otherwise, false * @param upperInclusive true if the upper bound is inclusive, otherwise, false - * @param numericConfig the {@link NumericConfig} that represents associated with the upper and lower bounds + * @param numericConfig the {@link LegacyNumericConfig} that represents associated with the upper and lower bounds * - * @see #setBounds(NumericQueryNode, NumericQueryNode, boolean, boolean, NumericConfig) + * @see #setBounds(LegacyNumericQueryNode, LegacyNumericQueryNode, boolean, boolean, LegacyNumericConfig) */ - public NumericRangeQueryNode(NumericQueryNode lower, NumericQueryNode upper, - boolean lowerInclusive, boolean upperInclusive, NumericConfig numericConfig) throws QueryNodeException { + public LegacyNumericRangeQueryNode(LegacyNumericQueryNode lower, LegacyNumericQueryNode upper, + boolean lowerInclusive, boolean upperInclusive, LegacyNumericConfig numericConfig) throws QueryNodeException { setBounds(lower, upper, lowerInclusive, upperInclusive, numericConfig); } @@ -73,17 +75,17 @@ private static LegacyNumericType getNumericDataType(Number number) throws QueryN /** * Sets the upper and lower bounds of this range query node and the - * {@link NumericConfig} associated with these bounds. + * {@link LegacyNumericConfig} associated with these bounds. 
* * @param lower the lower bound * @param upper the upper bound * @param lowerInclusive true if the lower bound is inclusive, otherwise, false * @param upperInclusive true if the upper bound is inclusive, otherwise, false - * @param numericConfig the {@link NumericConfig} that represents associated with the upper and lower bounds + * @param numericConfig the {@link LegacyNumericConfig} that represents associated with the upper and lower bounds * */ - public void setBounds(NumericQueryNode lower, NumericQueryNode upper, - boolean lowerInclusive, boolean upperInclusive, NumericConfig numericConfig) throws QueryNodeException { + public void setBounds(LegacyNumericQueryNode lower, LegacyNumericQueryNode upper, + boolean lowerInclusive, boolean upperInclusive, LegacyNumericConfig numericConfig) throws QueryNodeException { if (numericConfig == null) { throw new IllegalArgumentException("numericConfig cannot be null!"); @@ -123,11 +125,11 @@ public void setBounds(NumericQueryNode lower, NumericQueryNode upper, } /** - * Returns the {@link NumericConfig} associated with the lower and upper bounds. + * Returns the {@link LegacyNumericConfig} associated with the lower and upper bounds. 
* - * @return the {@link NumericConfig} associated with the lower and upper bounds + * @return the {@link LegacyNumericConfig} associated with the lower and upper bounds */ - public NumericConfig getNumericConfig() { + public LegacyNumericConfig getNumericConfig() { return this.numericConfig; } diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java similarity index 91% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java index 7509a39bd71f..6d4cba778a86 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/NumericQueryNode.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointQueryNode.java @@ -24,16 +24,16 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax.Type; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; /** - * This query node represents a field query that holds a numeric value. It is + * This query node represents a field query that holds a point value. It is * similar to {@link FieldQueryNode}, however the {@link #getValue()} returns a * {@link Number}. 
* - * @see NumericConfig + * @see PointsConfig */ -public class NumericQueryNode extends QueryNodeImpl implements +public class PointQueryNode extends QueryNodeImpl implements FieldValuePairQueryNode { private NumberFormat numberFormat; @@ -43,7 +43,7 @@ public class NumericQueryNode extends QueryNodeImpl implements private Number value; /** - * Creates a {@link NumericQueryNode} object using the given field, + * Creates a {@link PointQueryNode} object using the given field, * {@link Number} value and {@link NumberFormat} used to convert the value to * {@link String}. * @@ -51,7 +51,7 @@ public class NumericQueryNode extends QueryNodeImpl implements * @param value the value hold by this node * @param numberFormat the {@link NumberFormat} used to convert the value to {@link String} */ - public NumericQueryNode(CharSequence field, Number value, + public PointQueryNode(CharSequence field, Number value, NumberFormat numberFormat) { super(); diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java new file mode 100644 index 000000000000..cb838fc7d6cc --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/PointRangeQueryNode.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.flexible.standard.nodes; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; + +/** + * This query node represents a range query composed by {@link PointQueryNode} + * bounds, which means the bound values are {@link Number}s. + * + * @see PointQueryNode + * @see AbstractRangeQueryNode + */ +public class PointRangeQueryNode extends AbstractRangeQueryNode { + + public PointsConfig numericConfig; + + /** + * Constructs a {@link PointRangeQueryNode} object using the given + * {@link PointQueryNode} as its bounds and {@link PointsConfig}. + * + * @param lower the lower bound + * @param upper the upper bound + * @param lowerInclusive true if the lower bound is inclusive, otherwise, false + * @param upperInclusive true if the upper bound is inclusive, otherwise, false + * @param numericConfig the {@link PointsConfig} that represents associated with the upper and lower bounds + * + * @see #setBounds(PointQueryNode, PointQueryNode, boolean, boolean, PointsConfig) + */ + public PointRangeQueryNode(PointQueryNode lower, PointQueryNode upper, + boolean lowerInclusive, boolean upperInclusive, PointsConfig numericConfig) throws QueryNodeException { + setBounds(lower, upper, lowerInclusive, upperInclusive, numericConfig); + } + + /** + * Sets the upper and lower bounds of this range query node and the + * {@link PointsConfig} associated with these bounds. 
+ * + * @param lower the lower bound + * @param upper the upper bound + * @param lowerInclusive true if the lower bound is inclusive, otherwise, false + * @param upperInclusive true if the upper bound is inclusive, otherwise, false + * @param pointsConfig the {@link PointsConfig} that represents associated with the upper and lower bounds + * + */ + public void setBounds(PointQueryNode lower, PointQueryNode upper, + boolean lowerInclusive, boolean upperInclusive, PointsConfig pointsConfig) throws QueryNodeException { + + if (pointsConfig == null) { + throw new IllegalArgumentException("pointsConfig cannot be null!"); + } + + Class lowerNumberType, upperNumberType; + + if (lower != null && lower.getValue() != null) { + lowerNumberType = lower.getValue().getClass(); + } else { + lowerNumberType = null; + } + + if (upper != null && upper.getValue() != null) { + upperNumberType = upper.getValue().getClass(); + } else { + upperNumberType = null; + } + + if (lowerNumberType != null + && !lowerNumberType.equals(pointsConfig.getType())) { + throw new IllegalArgumentException( + "lower value's type should be the same as numericConfig type: " + + lowerNumberType + " != " + pointsConfig.getType()); + } + + if (upperNumberType != null + && !upperNumberType.equals(pointsConfig.getType())) { + throw new IllegalArgumentException( + "upper value's type should be the same as numericConfig type: " + + upperNumberType + " != " + pointsConfig.getType()); + } + + super.setBounds(lower, upper, lowerInclusive, upperInclusive); + this.numericConfig = pointsConfig; + } + + /** + * Returns the {@link PointsConfig} associated with the lower and upper bounds. 
+ * + * @return the {@link PointsConfig} associated with the lower and upper bounds + */ + public PointsConfig getPointsConfig() { + return this.numericConfig; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n"); + sb.append(getLowerBound()).append('\n'); + sb.append(getUpperBound()).append('\n'); + sb.append(""); + return sb.toString(); + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java similarity index 78% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java index 10bd6baf8338..8b7182437ce4 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericQueryNodeProcessor.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericQueryNodeProcessor.java @@ -30,19 +30,19 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode; import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode; +import 
org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode; /** * This processor is used to convert {@link FieldQueryNode}s to - * {@link NumericRangeQueryNode}s. It looks for - * {@link ConfigurationKeys#NUMERIC_CONFIG} set in the {@link FieldConfig} of + * {@link LegacyNumericRangeQueryNode}s. It looks for + * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} set in the {@link FieldConfig} of * every {@link FieldQueryNode} found. If - * {@link ConfigurationKeys#NUMERIC_CONFIG} is found, it considers that + * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} is found, it considers that * {@link FieldQueryNode} to be a numeric query and convert it to - * {@link NumericRangeQueryNode} with upper and lower inclusive and lower and + * {@link LegacyNumericRangeQueryNode} with upper and lower inclusive and lower and * upper equals to the value represented by the {@link FieldQueryNode} converted * to {@link Number}. It means that field:1 is converted to field:[1 * TO 1].
      @@ -50,17 +50,19 @@ * Note that {@link FieldQueryNode}s children of a * {@link RangeQueryNode} are ignored. * - * @see ConfigurationKeys#NUMERIC_CONFIG + * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG * @see FieldQueryNode - * @see NumericConfig - * @see NumericQueryNode + * @see LegacyNumericConfig + * @see LegacyNumericQueryNode + * @deprecated Index with points and use {@link PointQueryNodeProcessor} instead. */ -public class NumericQueryNodeProcessor extends QueryNodeProcessorImpl { +@Deprecated +public class LegacyNumericQueryNodeProcessor extends QueryNodeProcessorImpl { /** - * Constructs a {@link NumericQueryNodeProcessor} object. + * Constructs a {@link LegacyNumericQueryNodeProcessor} object. */ - public NumericQueryNodeProcessor() { + public LegacyNumericQueryNodeProcessor() { // empty constructor } @@ -78,8 +80,8 @@ protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { .getFieldAsString()); if (fieldConfig != null) { - NumericConfig numericConfig = fieldConfig - .get(ConfigurationKeys.NUMERIC_CONFIG); + LegacyNumericConfig numericConfig = fieldConfig + .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG); if (numericConfig != null) { @@ -118,12 +120,12 @@ protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY, fieldNode.getFieldAsString())); } - NumericQueryNode lowerNode = new NumericQueryNode(fieldNode + LegacyNumericQueryNode lowerNode = new LegacyNumericQueryNode(fieldNode .getField(), number, numberFormat); - NumericQueryNode upperNode = new NumericQueryNode(fieldNode + LegacyNumericQueryNode upperNode = new LegacyNumericQueryNode(fieldNode .getField(), number, numberFormat); - return new NumericRangeQueryNode(lowerNode, upperNode, true, true, + return new LegacyNumericRangeQueryNode(lowerNode, upperNode, true, true, numericConfig); } diff --git 
a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java similarity index 81% rename from lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java rename to lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java index bbe528452992..5a54b7b42056 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/NumericRangeQueryNodeProcessor.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/LegacyNumericRangeQueryNodeProcessor.java @@ -30,32 +30,34 @@ import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; import org.apache.lucene.queryparser.flexible.core.util.StringUtils; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode; -import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode; import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode; /** * This processor is used to convert {@link TermRangeQueryNode}s to - * {@link NumericRangeQueryNode}s. 
It looks for - * {@link ConfigurationKeys#NUMERIC_CONFIG} set in the {@link FieldConfig} of + * {@link LegacyNumericRangeQueryNode}s. It looks for + * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} set in the {@link FieldConfig} of * every {@link TermRangeQueryNode} found. If - * {@link ConfigurationKeys#NUMERIC_CONFIG} is found, it considers that + * {@link ConfigurationKeys#LEGACY_NUMERIC_CONFIG} is found, it considers that * {@link TermRangeQueryNode} to be a numeric range query and convert it to - * {@link NumericRangeQueryNode}. + * {@link LegacyNumericRangeQueryNode}. * - * @see ConfigurationKeys#NUMERIC_CONFIG + * @see ConfigurationKeys#LEGACY_NUMERIC_CONFIG * @see TermRangeQueryNode - * @see NumericConfig - * @see NumericRangeQueryNode + * @see LegacyNumericConfig + * @see LegacyNumericRangeQueryNode + * @deprecated Index with points and use {@link PointRangeQueryNodeProcessor} instead. */ -public class NumericRangeQueryNodeProcessor extends QueryNodeProcessorImpl { +@Deprecated +public class LegacyNumericRangeQueryNodeProcessor extends QueryNodeProcessorImpl { /** - * Constructs an empty {@link NumericRangeQueryNode} object. + * Constructs an empty {@link LegacyNumericRangeQueryNode} object. 
*/ - public NumericRangeQueryNodeProcessor() { + public LegacyNumericRangeQueryNodeProcessor() { // empty constructor } @@ -72,8 +74,8 @@ protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { if (fieldConfig != null) { - NumericConfig numericConfig = fieldConfig - .get(ConfigurationKeys.NUMERIC_CONFIG); + LegacyNumericConfig numericConfig = fieldConfig + .get(ConfigurationKeys.LEGACY_NUMERIC_CONFIG); if (numericConfig != null) { @@ -131,15 +133,15 @@ protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { if (lowerNumber != null) lowerNumber = lowerNumber.floatValue(); } - NumericQueryNode lowerNode = new NumericQueryNode( + LegacyNumericQueryNode lowerNode = new LegacyNumericQueryNode( termRangeNode.getField(), lowerNumber, numberFormat); - NumericQueryNode upperNode = new NumericQueryNode( + LegacyNumericQueryNode upperNode = new LegacyNumericQueryNode( termRangeNode.getField(), upperNumber, numberFormat); boolean lowerInclusive = termRangeNode.isLowerInclusive(); boolean upperInclusive = termRangeNode.isUpperInclusive(); - return new NumericRangeQueryNode(lowerNode, upperNode, + return new LegacyNumericRangeQueryNode(lowerNode, upperNode, lowerInclusive, upperInclusive, numericConfig); } diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java new file mode 100644 index 000000000000..81a844961563 --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointQueryNodeProcessor.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.flexible.standard.processors; + +import java.text.NumberFormat; +import java.text.ParseException; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.config.FieldConfig; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode; + +/** + * This processor is used to convert {@link FieldQueryNode}s to + * {@link PointRangeQueryNode}s. 
It looks for + * {@link ConfigurationKeys#POINTS_CONFIG} set in the {@link FieldConfig} of + * every {@link FieldQueryNode} found. If + * {@link ConfigurationKeys#POINTS_CONFIG} is found, it considers that + * {@link FieldQueryNode} to be a numeric query and convert it to + * {@link PointRangeQueryNode} with upper and lower inclusive and lower and + * upper equals to the value represented by the {@link FieldQueryNode} converted + * to {@link Number}. It means that field:1 is converted to field:[1 + * TO 1].
      + *
      + * Note that {@link FieldQueryNode}s children of a + * {@link RangeQueryNode} are ignored. + * + * @see ConfigurationKeys#POINTS_CONFIG + * @see FieldQueryNode + * @see PointsConfig + * @see PointQueryNode + */ +public class PointQueryNodeProcessor extends QueryNodeProcessorImpl { + + /** + * Constructs a {@link PointQueryNodeProcessor} object. + */ + public PointQueryNodeProcessor() { + // empty constructor + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof FieldQueryNode + && !(node.getParent() instanceof RangeQueryNode)) { + + QueryConfigHandler config = getQueryConfigHandler(); + + if (config != null) { + FieldQueryNode fieldNode = (FieldQueryNode) node; + FieldConfig fieldConfig = config.getFieldConfig(fieldNode + .getFieldAsString()); + + if (fieldConfig != null) { + PointsConfig numericConfig = fieldConfig.get(ConfigurationKeys.POINTS_CONFIG); + + if (numericConfig != null) { + + NumberFormat numberFormat = numericConfig.getNumberFormat(); + String text = fieldNode.getTextAsString(); + Number number = null; + + if (text.length() > 0) { + + try { + number = numberFormat.parse(text); + + } catch (ParseException e) { + throw new QueryNodeParseException(new MessageImpl( + QueryParserMessages.COULD_NOT_PARSE_NUMBER, fieldNode + .getTextAsString(), numberFormat.getClass() + .getCanonicalName()), e); + } + + if (Integer.class.equals(numericConfig.getType())) { + number = number.intValue(); + } else if (Long.class.equals(numericConfig.getType())) { + number = number.longValue(); + } else if (Double.class.equals(numericConfig.getType())) { + number = number.doubleValue(); + } else if (Float.class.equals(numericConfig.getType())) { + number = number.floatValue(); + } + + } else { + throw new QueryNodeParseException(new MessageImpl( + QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY, fieldNode.getFieldAsString())); + } + + PointQueryNode lowerNode = new PointQueryNode(fieldNode.getField(), 
number, numberFormat); + PointQueryNode upperNode = new PointQueryNode(fieldNode.getField(), number, numberFormat); + + return new PointRangeQueryNode(lowerNode, upperNode, true, true, numericConfig); + } + } + } + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) throws QueryNodeException { + return children; + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java new file mode 100644 index 000000000000..2ffc43735af5 --- /dev/null +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/PointRangeQueryNodeProcessor.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.lucene.queryparser.flexible.standard.processors; + +import java.text.NumberFormat; +import java.text.ParseException; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.config.FieldConfig; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.core.util.StringUtils; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode; + +/** + * This processor is used to convert {@link TermRangeQueryNode}s to + * {@link PointRangeQueryNode}s. It looks for + * {@link ConfigurationKeys#POINTS_CONFIG} set in the {@link FieldConfig} of + * every {@link TermRangeQueryNode} found. If + * {@link ConfigurationKeys#POINTS_CONFIG} is found, it considers that + * {@link TermRangeQueryNode} to be a numeric range query and convert it to + * {@link PointRangeQueryNode}. 
+ * + * @see ConfigurationKeys#POINTS_CONFIG + * @see TermRangeQueryNode + * @see PointsConfig + * @see PointRangeQueryNode + */ +public class PointRangeQueryNodeProcessor extends QueryNodeProcessorImpl { + + /** + * Constructs an empty {@link PointRangeQueryNodeProcessor} object. + */ + public PointRangeQueryNodeProcessor() { + // empty constructor + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof TermRangeQueryNode) { + QueryConfigHandler config = getQueryConfigHandler(); + + if (config != null) { + TermRangeQueryNode termRangeNode = (TermRangeQueryNode) node; + FieldConfig fieldConfig = config.getFieldConfig(StringUtils.toString(termRangeNode.getField())); + + if (fieldConfig != null) { + PointsConfig numericConfig = fieldConfig.get(ConfigurationKeys.POINTS_CONFIG); + + if (numericConfig != null) { + FieldQueryNode lower = termRangeNode.getLowerBound(); + FieldQueryNode upper = termRangeNode.getUpperBound(); + + String lowerText = lower.getTextAsString(); + String upperText = upper.getTextAsString(); + NumberFormat numberFormat = numericConfig.getNumberFormat(); + Number lowerNumber = null, upperNumber = null; + + if (lowerText.length() > 0) { + + try { + lowerNumber = numberFormat.parse(lowerText); + + } catch (ParseException e) { + throw new QueryNodeParseException(new MessageImpl( + QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower + .getTextAsString(), numberFormat.getClass() + .getCanonicalName()), e); + } + + } + + if (upperText.length() > 0) { + + try { + upperNumber = numberFormat.parse(upperText); + + } catch (ParseException e) { + throw new QueryNodeParseException(new MessageImpl( + QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper + .getTextAsString(), numberFormat.getClass() + .getCanonicalName()), e); + } + } + + if (Integer.class.equals(numericConfig.getType())) { + if (upperNumber != null) upperNumber = upperNumber.intValue(); + if (lowerNumber != null) lowerNumber = 
lowerNumber.intValue(); + } else if (Long.class.equals(numericConfig.getType())) { + if (upperNumber != null) upperNumber = upperNumber.longValue(); + if (lowerNumber != null) lowerNumber = lowerNumber.longValue(); + } else if (Double.class.equals(numericConfig.getType())) { + if (upperNumber != null) upperNumber = upperNumber.doubleValue(); + if (lowerNumber != null) lowerNumber = lowerNumber.doubleValue(); + } else if (Float.class.equals(numericConfig.getType())) { + if (upperNumber != null) upperNumber = upperNumber.floatValue(); + if (lowerNumber != null) lowerNumber = lowerNumber.floatValue(); + } + + PointQueryNode lowerNode = new PointQueryNode(termRangeNode.getField(), lowerNumber, numberFormat); + PointQueryNode upperNode = new PointQueryNode(termRangeNode.getField(), upperNumber, numberFormat); + + boolean lowerInclusive = termRangeNode.isLowerInclusive(); + boolean upperInclusive = termRangeNode.isUpperInclusive(); + + return new PointRangeQueryNode(lowerNode, upperNode, lowerInclusive, upperInclusive, numericConfig); + } + } + } + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) throws QueryNodeException { + return children; + } +} diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java index 06f38c2b2095..6e4a394f4454 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/StandardQueryNodeProcessorPipeline.java @@ -55,8 +55,10 @@ public StandardQueryNodeProcessorPipeline(QueryConfigHandler queryConfig) { add(new 
FuzzyQueryNodeProcessor()); add(new MatchAllDocsQueryNodeProcessor()); add(new OpenRangeQueryNodeProcessor()); - add(new NumericQueryNodeProcessor()); - add(new NumericRangeQueryNodeProcessor()); + add(new LegacyNumericQueryNodeProcessor()); + add(new LegacyNumericRangeQueryNodeProcessor()); + add(new PointQueryNodeProcessor()); + add(new PointRangeQueryNodeProcessor()); add(new LowercaseExpandedTermsQueryNodeProcessor()); add(new TermRangeQueryNodeProcessor()); add(new AllowLeadingWildcardProcessor()); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java similarity index 97% rename from lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java rename to lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java index c29573b89a6b..c6ab7f5ffffa 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java @@ -44,7 +44,7 @@ import org.apache.lucene.queryparser.flexible.core.QueryNodeException; import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; import org.apache.lucene.queryparser.flexible.standard.config.NumberDateFormat; -import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig; import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -56,7 +56,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class TestNumericQueryParser extends LuceneTestCase { +public class 
TestLegacyNumericQueryParser extends LuceneTestCase { private static enum NumberType { NEGATIVE, ZERO, POSITIVE; @@ -193,12 +193,12 @@ public static void beforeClass() throws Exception { .setMergePolicy(newLogMergePolicy())); Document doc = new Document(); - HashMap numericConfigMap = new HashMap<>(); + HashMap numericConfigMap = new HashMap<>(); HashMap numericFieldMap = new HashMap<>(); - qp.setNumericConfigMap(numericConfigMap); + qp.setLegacyNumericConfigMap(numericConfigMap); for (LegacyNumericType type : LegacyNumericType.values()) { - numericConfigMap.put(type.name(), new NumericConfig(PRECISION_STEP, + numericConfigMap.put(type.name(), new LegacyNumericConfig(PRECISION_STEP, NUMBER_FORMAT, type)); FieldType ft = new FieldType(LegacyIntField.TYPE_NOT_STORED); @@ -229,7 +229,7 @@ public static void beforeClass() throws Exception { doc.add(field); } - numericConfigMap.put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP, + numericConfigMap.put(DATE_FIELD_NAME, new LegacyNumericConfig(PRECISION_STEP, DATE_FORMAT, LegacyNumericType.LONG)); FieldType ft = new FieldType(LegacyLongField.TYPE_NOT_STORED); ft.setStored(true); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java new file mode 100644 index 000000000000..323b0ffba9fd --- /dev/null +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestPointQueryParser.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.queryparser.flexible.standard; + +import java.text.NumberFormat; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig; +import org.apache.lucene.util.LuceneTestCase; + +/** Simple test for point field integration into the flexible QP */ +public class TestPointQueryParser extends LuceneTestCase { + + public void testIntegers() throws Exception { + StandardQueryParser parser = new StandardQueryParser(); + Map pointsConfig = new HashMap<>(); + pointsConfig.put("intField", new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Integer.class)); + parser.setPointsConfigMap(pointsConfig); + + assertEquals(IntPoint.newRangeQuery("intField", 1, 3), + parser.parse("intField:[1 TO 3]", "body")); + assertEquals(IntPoint.newRangeQuery("intField", 1, 1), + parser.parse("intField:1", "body")); + } + + public void testLongs() throws Exception { + StandardQueryParser parser = new StandardQueryParser(); + Map pointsConfig = new HashMap<>(); + pointsConfig.put("longField", new PointsConfig(NumberFormat.getIntegerInstance(Locale.ROOT), Long.class)); + parser.setPointsConfigMap(pointsConfig); + + assertEquals(LongPoint.newRangeQuery("longField", 1, 3), + parser.parse("longField:[1 TO 3]", "body")); + assertEquals(LongPoint.newRangeQuery("longField", 1, 
1), + parser.parse("longField:1", "body")); + } + + public void testFloats() throws Exception { + StandardQueryParser parser = new StandardQueryParser(); + Map pointsConfig = new HashMap<>(); + pointsConfig.put("floatField", new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Float.class)); + parser.setPointsConfigMap(pointsConfig); + + assertEquals(FloatPoint.newRangeQuery("floatField", 1.5F, 3.6F), + parser.parse("floatField:[1.5 TO 3.6]", "body")); + assertEquals(FloatPoint.newRangeQuery("floatField", 1.5F, 1.5F), + parser.parse("floatField:1.5", "body")); + } + + public void testDoubles() throws Exception { + StandardQueryParser parser = new StandardQueryParser(); + Map pointsConfig = new HashMap<>(); + pointsConfig.put("doubleField", new PointsConfig(NumberFormat.getNumberInstance(Locale.ROOT), Double.class)); + parser.setPointsConfigMap(pointsConfig); + + assertEquals(DoublePoint.newRangeQuery("doubleField", 1.5D, 3.6D), + parser.parse("doubleField:[1.5 TO 3.6]", "body")); + assertEquals(DoublePoint.newRangeQuery("floatField", 1.5D, 1.5D), + parser.parse("doubleField:1.5", "body")); + } + +} diff --git a/lucene/tools/junit4/cached-timehints.txt b/lucene/tools/junit4/cached-timehints.txt index 9c8b22a390fb..f2b8974a600a 100644 --- a/lucene/tools/junit4/cached-timehints.txt +++ b/lucene/tools/junit4/cached-timehints.txt @@ -548,7 +548,7 @@ org.apache.lucene.queryparser.flexible.spans.TestSpanQueryParser=711,339,113,55, org.apache.lucene.queryparser.flexible.spans.TestSpanQueryParserSimpleSample=51,644,92,32,129,60,21 org.apache.lucene.queryparser.flexible.standard.TestMultiAnalyzerQPHelper=44,84,87,34,260,35,229 org.apache.lucene.queryparser.flexible.standard.TestMultiFieldQPHelper=1152,1779,1888,384,179,1665,445 -org.apache.lucene.queryparser.flexible.standard.TestNumericQueryParser=344,496,451,1373,733,1420,367 +org.apache.lucene.queryparser.flexible.standard.TestLegacyNumericQueryParser=344,496,451,1373,733,1420,367 
org.apache.lucene.queryparser.flexible.standard.TestQPHelper=287,676,989,2137,860,586,612 org.apache.lucene.queryparser.flexible.standard.TestStandardQP=1643,430,2182,2193,600,1506,741 org.apache.lucene.queryparser.surround.query.SrndQueryTest=1062,92,92,212,65,95,113 From 7b03e6e02a92878c540ec754fa504dd75fc631a9 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 10 Mar 2016 07:25:48 -0500 Subject: [PATCH 0090/1113] LUCENE-7081: prefix-compress compressible fixed-width data (like InetAddress/BigInteger) --- .../lucene54/Lucene54DocValuesConsumer.java | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java index 858c54b362f9..96acfd25b5ac 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesConsumer.java @@ -411,17 +411,32 @@ public void addBinaryField(FieldInfo field, Iterable values) throws IO /** expert: writes a value dictionary for a sorted/sortedset field */ private void addTermsDict(FieldInfo field, final Iterable values) throws IOException { - // first check if it's a "fixed-length" terms dict + // first check if it's a "fixed-length" terms dict, and compressibility if so int minLength = Integer.MAX_VALUE; int maxLength = Integer.MIN_VALUE; long numValues = 0; + BytesRefBuilder previousValue = new BytesRefBuilder(); + long prefixSum = 0; // only valid for fixed-width data, as we have a choice there for (BytesRef v : values) { minLength = Math.min(minLength, v.length); maxLength = Math.max(maxLength, v.length); + if (minLength == maxLength) { + int termPosition = (int) (numValues & INTERVAL_MASK); + if (termPosition == 0) { + // first term in block, save it away to compare against the last term later + previousValue.copyBytes(v); + } 
else if (termPosition == INTERVAL_COUNT - 1) { + // last term in block, accumulate shared prefix against first term + prefixSum += StringHelper.bytesDifference(previousValue.get(), v); + } + } numValues++; } - if (minLength == maxLength) { - // no index needed: direct addressing by mult + // for fixed width data, look at the avg(shared prefix) before deciding how to encode: + // prefix compression "costs" worst case 2 bytes per term because we must store suffix lengths. + // so if we share at least 3 bytes on average, always compress. + if (minLength == maxLength && prefixSum <= 3*(numValues >> INTERVAL_SHIFT)) { + // no index needed: not very compressible, direct addressing by mult addBinaryField(field, values); } else if (numValues < REVERSE_INTERVAL_COUNT) { // low cardinality: waste a few KB of ram, but can't really use fancy index etc From 02b0dd52ec115c942f95124ccc2ec1e2a461ccd9 Mon Sep 17 00:00:00 2001 From: Shalin Shekhar Mangar Date: Thu, 10 Mar 2016 18:16:43 +0530 Subject: [PATCH 0091/1113] Fix javadocs for ClusterState.getCollectionOrNull (cherry picked from commit 66cd070) --- .../src/java/org/apache/solr/common/cloud/ClusterState.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index ff0e6a3965f9..2495c41061a3 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -180,9 +180,9 @@ public CollectionRef getCollectionRef(String coll) { * if such a collection exists. Returns null otherwise. * * Implementation note: This method resolves the collection reference by calling - * {@link CollectionRef#get()} which can make a call to ZooKeeper. This is necessary + * {@link CollectionRef#get()} which may make a call to ZooKeeper. 
This is necessary * because the semantics of how collection list is loaded have changed in SOLR-6629. - * Please javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)} + * Please see javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)} */ public DocCollection getCollectionOrNull(String collectionName) { CollectionRef ref = collectionStates.get(collectionName); From 73db4cab6a7f4948e803f649759781c84fbb3e3f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 10 Mar 2016 09:12:38 +0100 Subject: [PATCH 0092/1113] LUCENE-7087: Let MemoryIndex#fromDocument(...) accept 'Iterable' as document instead of 'Document' --- lucene/CHANGES.txt | 5 +++++ .../java/org/apache/lucene/index/memory/MemoryIndex.java | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 3abd4fb5e80a..832f81971246 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -11,6 +11,11 @@ Optimizations * LUCENE-7071: Reduce bytes copying in OfflineSorter, giving ~10% speedup on merging 2D LatLonPoint values (Mike McCandless) +Other + +* LUCENE-7087: Let MemoryIndex#fromDocument(...) accept 'Iterable' + as document instead of 'Document'. 
(Martijn van Groningen) + ======================= Lucene 6.0.0 ======================= System Requirements diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java index 849cd6338685..9e01182ec902 100644 --- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java +++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java @@ -265,7 +265,7 @@ public void addField(String fieldName, String text, Analyzer analyzer) { * @param analyzer the analyzer to use * @return a MemoryIndex */ - public static MemoryIndex fromDocument(Document document, Analyzer analyzer) { + public static MemoryIndex fromDocument(Iterable document, Analyzer analyzer) { return fromDocument(document, analyzer, false, false, 0); } @@ -277,7 +277,7 @@ public static MemoryIndex fromDocument(Document document, Analyzer analyzer) { * @param storePayloads true if payloads should be stored * @return a MemoryIndex */ - public static MemoryIndex fromDocument(Document document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads) { + public static MemoryIndex fromDocument(Iterable document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads) { return fromDocument(document, analyzer, storeOffsets, storePayloads, 0); } @@ -290,7 +290,7 @@ public static MemoryIndex fromDocument(Document document, Analyzer analyzer, boo * @param maxReusedBytes the number of bytes that should remain in the internal memory pools after {@link #reset()} is called * @return a MemoryIndex */ - public static MemoryIndex fromDocument(Document document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads, long maxReusedBytes) { + public static MemoryIndex fromDocument(Iterable document, Analyzer analyzer, boolean storeOffsets, boolean storePayloads, long maxReusedBytes) { MemoryIndex mi = new MemoryIndex(storeOffsets, storePayloads, maxReusedBytes); for (IndexableField field 
: document) { mi.addField(field, analyzer); From a3f72fcdec2188900bb511a710d4714d5053ea40 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 10 Mar 2016 08:28:51 -0500 Subject: [PATCH 0093/1113] LUCENE-7090, LUCENE-7075: deprecate single-valued LegacyNumerics fieldcaching, provide Points-based replacement. --- .../apache/lucene/uninverting/FieldCache.java | 113 ++- .../lucene/uninverting/FieldCacheImpl.java | 164 ++++- .../lucene/uninverting/UninvertingReader.java | 97 ++- .../lucene/uninverting/TestDocTermOrds.java | 2 +- .../lucene/uninverting/TestFieldCache.java | 141 ++-- .../uninverting/TestFieldCacheReopen.java | 9 +- .../TestFieldCacheSanityChecker.java | 10 +- .../uninverting/TestFieldCacheSort.java | 684 ++++++++++++++++-- .../uninverting/TestFieldCacheSortRandom.java | 8 +- .../TestFieldCacheVsDocValues.java | 4 +- .../TestFieldCacheWithThreads.java | 11 +- .../uninverting/TestLegacyFieldCache.java | 498 +++++++++++++ .../uninverting/TestNumericTerms32.java | 6 +- .../uninverting/TestNumericTerms64.java | 8 +- .../uninverting/TestUninvertingReader.java | 6 +- .../lucene/spatial/SpatialTestCase.java | 4 +- .../org/apache/solr/schema/EnumField.java | 2 +- .../org/apache/solr/schema/TrieField.java | 8 +- 18 files changed, 1575 insertions(+), 200 deletions(-) create mode 100644 lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java index 314d6aa011cd..27d68e04abb0 100644 --- a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java +++ b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCache.java @@ -32,6 +32,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LegacyNumericUtils; +import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.RamUsageEstimator; /** @@ -71,21 +72,101 @@ public interface 
Parser { * @param terms the {@link Terms} instance to create the {@link TermsEnum} from. * @return a possibly filtered {@link TermsEnum} instance, this method must not return null. * @throws IOException if an {@link IOException} occurs + * @deprecated index with Points instead */ + @Deprecated public TermsEnum termsEnum(Terms terms) throws IOException; /** Parse's this field's value */ public long parseValue(BytesRef term); } + + /** + * Base class for points parsers. These parsers do not use the inverted index, but instead + * uninvert point data. + * + * This abstraction can be cleaned up when Parser.termsEnum is removed. + */ + public abstract class PointParser implements Parser { + public final TermsEnum termsEnum(Terms terms) throws IOException { + throw new UnsupportedOperationException("makes no sense for parsing points"); + } + } /** Expert: The cache used internally by sorting and range query classes. */ public static FieldCache DEFAULT = new FieldCacheImpl(); + /** + * A parser instance for int values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed + * via {@link org.apache.lucene.document.IntPoint}. + */ + public static final Parser INT_POINT_PARSER = new PointParser() { + @Override + public long parseValue(BytesRef point) { + return NumericUtils.sortableBytesToInt(point.bytes, point.offset); + } + + @Override + public String toString() { + return FieldCache.class.getName()+".INT_POINT_PARSER"; + } + }; + + /** + * A parser instance for long values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed + * via {@link org.apache.lucene.document.LongPoint}. 
+ */ + public static final Parser LONG_POINT_PARSER = new PointParser() { + @Override + public long parseValue(BytesRef point) { + return NumericUtils.sortableBytesToLong(point.bytes, point.offset); + } + + @Override + public String toString() { + return FieldCache.class.getName()+".LONG_POINT_PARSER"; + } + }; + + /** + * A parser instance for float values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed + * via {@link org.apache.lucene.document.FloatPoint}. + */ + public static final Parser FLOAT_POINT_PARSER = new PointParser() { + @Override + public long parseValue(BytesRef point) { + return NumericUtils.sortableFloatBits(NumericUtils.sortableBytesToInt(point.bytes, point.offset)); + } + + @Override + public String toString() { + return FieldCache.class.getName()+".FLOAT_POINT_PARSER"; + } + }; + + /** + * A parser instance for double values encoded by {@link org.apache.lucene.util.NumericUtils}, e.g. when indexed + * via {@link org.apache.lucene.document.DoublePoint}. + */ + public static final Parser DOUBLE_POINT_PARSER = new PointParser() { + @Override + public long parseValue(BytesRef point) { + return NumericUtils.sortableDoubleBits(NumericUtils.sortableBytesToLong(point.bytes, point.offset)); + } + + @Override + public String toString() { + return FieldCache.class.getName()+".DOUBLE_POINT_PARSER"; + } + }; + /** * A parser instance for int values encoded by {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed * via {@link org.apache.lucene.document.LegacyIntField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}. + * @deprecated Index with points and use {@link #INT_POINT_PARSER} instead. 
*/ - public static final Parser NUMERIC_UTILS_INT_PARSER = new Parser() { + @Deprecated + public static final Parser LEGACY_INT_PARSER = new Parser() { @Override public long parseValue(BytesRef term) { return LegacyNumericUtils.prefixCodedToInt(term); @@ -98,15 +179,17 @@ public TermsEnum termsEnum(Terms terms) throws IOException { @Override public String toString() { - return FieldCache.class.getName()+".NUMERIC_UTILS_INT_PARSER"; + return FieldCache.class.getName()+".LEGACY_INT_PARSER"; } }; /** * A parser instance for float values encoded with {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed * via {@link org.apache.lucene.document.LegacyFloatField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}. + * @deprecated Index with points and use {@link #FLOAT_POINT_PARSER} instead. */ - public static final Parser NUMERIC_UTILS_FLOAT_PARSER = new Parser() { + @Deprecated + public static final Parser LEGACY_FLOAT_PARSER = new Parser() { @Override public long parseValue(BytesRef term) { int val = LegacyNumericUtils.prefixCodedToInt(term); @@ -116,7 +199,7 @@ public long parseValue(BytesRef term) { @Override public String toString() { - return FieldCache.class.getName()+".NUMERIC_UTILS_FLOAT_PARSER"; + return FieldCache.class.getName()+".LEGACY_FLOAT_PARSER"; } @Override @@ -128,15 +211,17 @@ public TermsEnum termsEnum(Terms terms) throws IOException { /** * A parser instance for long values encoded by {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed * via {@link org.apache.lucene.document.LegacyLongField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}. + * @deprecated Index with points and use {@link #LONG_POINT_PARSER} instead. 
*/ - public static final Parser NUMERIC_UTILS_LONG_PARSER = new Parser() { + @Deprecated + public static final Parser LEGACY_LONG_PARSER = new Parser() { @Override public long parseValue(BytesRef term) { return LegacyNumericUtils.prefixCodedToLong(term); } @Override public String toString() { - return FieldCache.class.getName()+".NUMERIC_UTILS_LONG_PARSER"; + return FieldCache.class.getName()+".LEGACY_LONG_PARSER"; } @Override @@ -148,8 +233,10 @@ public TermsEnum termsEnum(Terms terms) throws IOException { /** * A parser instance for double values encoded with {@link org.apache.lucene.util.LegacyNumericUtils}, e.g. when indexed * via {@link org.apache.lucene.document.LegacyDoubleField}/{@link org.apache.lucene.analysis.LegacyNumericTokenStream}. + * @deprecated Index with points and use {@link #DOUBLE_POINT_PARSER} instead. */ - public static final Parser NUMERIC_UTILS_DOUBLE_PARSER = new Parser() { + @Deprecated + public static final Parser LEGACY_DOUBLE_PARSER = new Parser() { @Override public long parseValue(BytesRef term) { long val = LegacyNumericUtils.prefixCodedToLong(term); @@ -158,7 +245,7 @@ public long parseValue(BytesRef term) { } @Override public String toString() { - return FieldCache.class.getName()+".NUMERIC_UTILS_DOUBLE_PARSER"; + return FieldCache.class.getName()+".LEGACY_DOUBLE_PARSER"; } @Override @@ -168,18 +255,20 @@ public TermsEnum termsEnum(Terms terms) throws IOException { }; /** Checks the internal cache for an appropriate entry, and if none is found, - * reads the terms in field and returns a bit set at the size of + * reads the terms/points in field and returns a bit set at the size of * reader.maxDoc(), with turned on bits for each docid that * does have a value for this field. + * @param parser May be {@code null} if coming from the inverted index, otherwise + * can be a {@link PointParser} to compute from point values. 
*/ - public Bits getDocsWithField(LeafReader reader, String field) throws IOException; + public Bits getDocsWithField(LeafReader reader, String field, Parser parser) throws IOException; /** * Returns a {@link NumericDocValues} over the values found in documents in the given * field. If the field was indexed as {@link NumericDocValuesField}, it simply * uses {@link org.apache.lucene.index.LeafReader#getNumericDocValues(String)} to read the values. * Otherwise, it checks the internal cache for an appropriate entry, and if - * none is found, reads the terms in field as longs and returns + * none is found, reads the terms/points in field as longs and returns * an array of size reader.maxDoc() of the value each document * has in the given field. * @@ -199,7 +288,7 @@ public TermsEnum termsEnum(Terms terms) throws IOException { * If any error occurs. */ public NumericDocValues getNumerics(LeafReader reader, String field, Parser parser, boolean setDocsWithField) throws IOException; - + /** Checks the internal cache for an appropriate entry, and if none * is found, reads the term values in field * and returns a {@link BinaryDocValues} instance, providing a diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java index c6bd943725de..589da132d29c 100644 --- a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java +++ b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java @@ -34,6 +34,9 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; @@ -262,8 +265,68 @@ 
public int hashCode() { private static abstract class Uninvert { public Bits docsWithField; + final boolean points; + + // pass true to pull from points, otherwise postings. + Uninvert(boolean points) { + this.points = points; + } + + final void uninvert(LeafReader reader, String field, boolean setDocsWithField) throws IOException { + if (points) { + uninvertPoints(reader, field, setDocsWithField); + } else { + uninvertPostings(reader, field, setDocsWithField); + } + } + + final void uninvertPoints(LeafReader reader, String field, boolean setDocsWithField) throws IOException { + final int maxDoc = reader.maxDoc(); + PointValues values = reader.getPointValues(); + assert values != null; + assert values.size(field) > 0; + + if (setDocsWithField) { + final int docCount = values.getDocCount(field); + assert docCount <= maxDoc; + if (docCount == maxDoc) { + // Fast case: all docs have this field: + this.docsWithField = new Bits.MatchAllBits(maxDoc); + setDocsWithField = false; + } + } + + final boolean doDocsWithField = setDocsWithField; + BytesRef scratch = new BytesRef(); + values.intersect(field, new IntersectVisitor() { + @Override + public void visit(int docID) throws IOException { + throw new AssertionError(); + } + + @Override + public void visit(int docID, byte[] packedValue) throws IOException { + scratch.bytes = packedValue; + scratch.length = packedValue.length; + visitTerm(scratch); + visitDoc(docID); + if (doDocsWithField) { + if (docsWithField == null) { + // Lazy init + docsWithField = new FixedBitSet(maxDoc); + } + ((FixedBitSet)docsWithField).set(docID); + } + } - public void uninvert(LeafReader reader, String field, boolean setDocsWithField) throws IOException { + @Override + public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + return Relation.CELL_CROSSES_QUERY; // inspect all byte-docid pairs + } + }); + } + + final void uninvertPostings(LeafReader reader, String field, boolean setDocsWithField) throws IOException { final int 
maxDoc = reader.maxDoc(); Terms terms = reader.terms(field); if (terms != null) { @@ -306,13 +369,15 @@ public void uninvert(LeafReader reader, String field, boolean setDocsWithField) } } + /** @deprecated remove this when legacy numerics are removed */ + @Deprecated protected abstract TermsEnum termsEnum(Terms terms) throws IOException; protected abstract void visitTerm(BytesRef term); protected abstract void visitDoc(int docID); } // null Bits means no docs matched - void setDocsWithField(LeafReader reader, String field, Bits docsWithField) { + void setDocsWithField(LeafReader reader, String field, Bits docsWithField, Parser parser) { final int maxDoc = reader.maxDoc(); final Bits bits; if (docsWithField == null) { @@ -329,7 +394,7 @@ void setDocsWithField(LeafReader reader, String field, Bits docsWithField) { } else { bits = docsWithField; } - caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, null), new BitsEntry(bits)); + caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, parser), new BitsEntry(bits)); } private static class HoldsOneThing { @@ -353,17 +418,25 @@ private static class GrowableWriterAndMinValue { public long minValue; } - public Bits getDocsWithField(LeafReader reader, String field) throws IOException { + public Bits getDocsWithField(LeafReader reader, String field, Parser parser) throws IOException { final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); if (fieldInfo == null) { // field does not exist or has no value return new Bits.MatchNoBits(reader.maxDoc()); } else if (fieldInfo.getDocValuesType() != DocValuesType.NONE) { return reader.getDocsWithField(field); - } else if (fieldInfo.getIndexOptions() == IndexOptions.NONE) { - return new Bits.MatchNoBits(reader.maxDoc()); + } + + if (parser instanceof PointParser) { + // points case + + } else { + // postings case + if (fieldInfo.getIndexOptions() == IndexOptions.NONE) { + return new Bits.MatchNoBits(reader.maxDoc()); + } } - BitsEntry 
bitsEntry = (BitsEntry) caches.get(DocsWithFieldCache.class).get(reader, new CacheKey(field, null), false); + BitsEntry bitsEntry = (BitsEntry) caches.get(DocsWithFieldCache.class).get(reader, new CacheKey(field, parser), false); return bitsEntry.bits; } @@ -391,9 +464,48 @@ static final class DocsWithFieldCache extends Cache { } @Override - protected BitsEntry createValue(LeafReader reader, CacheKey key, boolean setDocsWithField /* ignored */) - throws IOException { + protected BitsEntry createValue(LeafReader reader, CacheKey key, boolean setDocsWithField /* ignored */) throws IOException { final String field = key.field; + final Parser parser = (Parser) key.custom; + if (parser instanceof PointParser) { + return createValuePoints(reader, field); + } else { + return createValuePostings(reader, field); + } + } + + private BitsEntry createValuePoints(LeafReader reader, String field) throws IOException { + final int maxDoc = reader.maxDoc(); + PointValues values = reader.getPointValues(); + assert values != null; + assert values.size(field) > 0; + + final int docCount = values.getDocCount(field); + assert docCount <= maxDoc; + if (docCount == maxDoc) { + // Fast case: all docs have this field: + return new BitsEntry(new Bits.MatchAllBits(maxDoc)); + } + + // otherwise a no-op uninvert! + Uninvert u = new Uninvert(true) { + @Override + protected TermsEnum termsEnum(Terms terms) throws IOException { + throw new AssertionError(); + } + + @Override + protected void visitTerm(BytesRef term) {} + + @Override + protected void visitDoc(int docID) {} + }; + u.uninvert(reader, field, true); + return new BitsEntry(u.docsWithField); + } + + // TODO: it is dumb that uninverting code is duplicated here in this method!! 
+ private BitsEntry createValuePostings(LeafReader reader, String field) throws IOException { final int maxDoc = reader.maxDoc(); // Visit all docs that have terms for this field @@ -458,8 +570,32 @@ public NumericDocValues getNumerics(LeafReader reader, String field, Parser pars return DocValues.emptyNumeric(); } else if (info.getDocValuesType() != DocValuesType.NONE) { throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType()); - } else if (info.getIndexOptions() == IndexOptions.NONE) { - return DocValues.emptyNumeric(); + } + + if (parser instanceof PointParser) { + // points case + // no points in this segment + if (info.getPointDimensionCount() == 0) { + return DocValues.emptyNumeric(); + } + if (info.getPointDimensionCount() != 1) { + throw new IllegalStateException("Type mismatch: " + field + " was indexed with dimensions=" + info.getPointDimensionCount()); + } + PointValues values = reader.getPointValues(); + // no actual points for this field (e.g. 
all points deleted) + if (values == null || values.size(field) == 0) { + return DocValues.emptyNumeric(); + } + // not single-valued + if (values.size(field) != values.getDocCount(field)) { + throw new IllegalStateException("Type mismatch: " + field + " was indexed with multiple values, numValues=" + values.size(field) + ",numDocs=" + values.getDocCount(field)); + } + } else { + // postings case + // not indexed + if (info.getIndexOptions() == IndexOptions.NONE) { + return DocValues.emptyNumeric(); + } } return (NumericDocValues) caches.get(Long.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField); } @@ -498,7 +634,7 @@ protected Accountable createValue(final LeafReader reader, CacheKey key, boolean final HoldsOneThing valuesRef = new HoldsOneThing<>(); - Uninvert u = new Uninvert() { + Uninvert u = new Uninvert(parser instanceof PointParser) { private long minValue; private long currentValue; private GrowableWriter values; @@ -542,7 +678,7 @@ protected TermsEnum termsEnum(Terms terms) throws IOException { u.uninvert(reader, key.field, setDocsWithField); if (setDocsWithField) { - wrapper.setDocsWithField(reader, key.field, u.docsWithField); + wrapper.setDocsWithField(reader, key.field, u.docsWithField, parser); } GrowableWriterAndMinValue values = valuesRef.get(); if (values == null) { @@ -872,7 +1008,7 @@ public boolean get(int index) { public int length() { return maxDoc; } - }); + }, null); } // maybe an int-only impl? 
return new BinaryDocValuesImpl(bytes.freeze(true), offsetReader); diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java index 9f96b4f77447..10d1a5b9b3c1 100644 --- a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java +++ b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java @@ -57,34 +57,70 @@ public class UninvertingReader extends FilterLeafReader { * Specifies the type of uninversion to apply for the field. */ public static enum Type { + /** + * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.IntPoint}) + *

      + * Fields with this type act as if they were indexed with + * {@link NumericDocValuesField}. + */ + INTEGER_POINT, + /** + * Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LongPoint}) + *

      + * Fields with this type act as if they were indexed with + * {@link NumericDocValuesField}. + */ + LONG_POINT, + /** + * Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.FloatPoint}) + *

      + * Fields with this type act as if they were indexed with + * {@link NumericDocValuesField}. + */ + FLOAT_POINT, + /** + * Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.DoublePoint}) + *

      + * Fields with this type act as if they were indexed with + * {@link NumericDocValuesField}. + */ + DOUBLE_POINT, /** * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.LegacyIntField}) *

      * Fields with this type act as if they were indexed with * {@link NumericDocValuesField}. + * @deprecated Index with points and use {@link #INTEGER_POINT} instead. */ - INTEGER, + @Deprecated + LEGACY_INTEGER, /** * Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LegacyLongField}) *

      * Fields with this type act as if they were indexed with * {@link NumericDocValuesField}. + * @deprecated Index with points and use {@link #LONG_POINT} instead. */ - LONG, + @Deprecated + LEGACY_LONG, /** * Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.LegacyFloatField}) *

      * Fields with this type act as if they were indexed with * {@link NumericDocValuesField}. + * @deprecated Index with points and use {@link #FLOAT_POINT} instead. */ - FLOAT, + @Deprecated + LEGACY_FLOAT, /** * Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.LegacyDoubleField}) *

      * Fields with this type act as if they were indexed with * {@link NumericDocValuesField}. + * @deprecated Index with points and use {@link #DOUBLE_POINT} instead. */ - DOUBLE, + @Deprecated + LEGACY_DOUBLE, /** * Single-valued Binary, (e.g. indexed with {@link StringField}) *

      @@ -181,14 +217,29 @@ public UninvertingReader(LeafReader in, Map mapping) { ArrayList filteredInfos = new ArrayList<>(); for (FieldInfo fi : in.getFieldInfos()) { DocValuesType type = fi.getDocValuesType(); - if (fi.getIndexOptions() != IndexOptions.NONE && fi.getDocValuesType() == DocValuesType.NONE) { + if (type == DocValuesType.NONE) { Type t = mapping.get(fi.name); if (t != null) { + if (t == Type.INTEGER_POINT || t == Type.LONG_POINT || t == Type.FLOAT_POINT || t == Type.DOUBLE_POINT) { + // type uses points + if (fi.getPointDimensionCount() == 0) { + continue; + } + } else { + // type uses inverted index + if (fi.getIndexOptions() == IndexOptions.NONE) { + continue; + } + } switch(t) { - case INTEGER: - case LONG: - case FLOAT: - case DOUBLE: + case INTEGER_POINT: + case LONG_POINT: + case FLOAT_POINT: + case DOUBLE_POINT: + case LEGACY_INTEGER: + case LEGACY_LONG: + case LEGACY_FLOAT: + case LEGACY_DOUBLE: type = DocValuesType.NUMERIC; break; case BINARY: @@ -226,10 +277,14 @@ public NumericDocValues getNumericDocValues(String field) throws IOException { Type v = getType(field); if (v != null) { switch (v) { - case INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_INT_PARSER, true); - case FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true); - case LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_LONG_PARSER, true); - case DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true); + case INTEGER_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.INT_POINT_PARSER, true); + case FLOAT_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.FLOAT_POINT_PARSER, true); + case LONG_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LONG_POINT_PARSER, true); + case DOUBLE_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.DOUBLE_POINT_PARSER, 
true); + case LEGACY_INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_INT_PARSER, true); + case LEGACY_FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_FLOAT_PARSER, true); + case LEGACY_LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_LONG_PARSER, true); + case LEGACY_DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_DOUBLE_PARSER, true); } } return super.getNumericDocValues(field); @@ -275,8 +330,20 @@ public SortedSetDocValues getSortedSetDocValues(String field) throws IOException @Override public Bits getDocsWithField(String field) throws IOException { - if (getType(field) != null) { - return FieldCache.DEFAULT.getDocsWithField(in, field); + Type v = getType(field); + if (v != null) { + switch (v) { + case INTEGER_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.INT_POINT_PARSER); + case FLOAT_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.FLOAT_POINT_PARSER); + case LONG_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LONG_POINT_PARSER); + case DOUBLE_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.DOUBLE_POINT_PARSER); + case LEGACY_INTEGER: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_INT_PARSER); + case LEGACY_FLOAT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_FLOAT_PARSER); + case LEGACY_LONG: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_LONG_PARSER); + case LEGACY_DOUBLE: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_DOUBLE_PARSER); + default: + return FieldCache.DEFAULT.getDocsWithField(in, field, null); + } } else { return in.getDocsWithField(field); } diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java index 8c1fae7b646b..4861cd35ca2f 100644 --- 
a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java @@ -353,7 +353,7 @@ private void verify(LeafReader r, int[][] idToOrds, BytesRef[] termsArray, Bytes TestUtil.nextInt(random(), 2, 10)); - final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.LEGACY_INT_PARSER, false); /* for(int docID=0;docID { - FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.INT_POINT_PARSER, false); }); BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary", true); @@ -460,12 +442,12 @@ public void testDocValuesIntegration() throws Exception { new DocTermOrds(ar, null, "binary"); }); - Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary"); + Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary", null); assertTrue(bits.get(0)); // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds() expectThrows(IllegalStateException.class, () -> { - FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.INT_POINT_PARSER, false); }); expectThrows(IllegalStateException.class, () -> { @@ -488,11 +470,11 @@ public void testDocValuesIntegration() throws Exception { assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd()); assertEquals(1, sortedSet.getValueCount()); - bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted"); + bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted", null); assertTrue(bits.get(0)); // Numeric type: can be retrieved via getInts() and so on - NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + NumericDocValues numeric 
= FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.INT_POINT_PARSER, false); assertEquals(42, numeric.get(0)); expectThrows(IllegalStateException.class, () -> { @@ -511,12 +493,12 @@ public void testDocValuesIntegration() throws Exception { new DocTermOrds(ar, null, "numeric"); }); - bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric"); + bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric", null); assertTrue(bits.get(0)); // SortedSet type: can be retrieved via getDocTermOrds() expectThrows(IllegalStateException.class, () -> { - FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.INT_POINT_PARSER, false); }); expectThrows(IllegalStateException.class, () -> { @@ -538,7 +520,7 @@ public void testDocValuesIntegration() throws Exception { assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd()); assertEquals(2, sortedSet.getValueCount()); - bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset"); + bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset", null); assertTrue(bits.get(0)); ir.close(); @@ -559,16 +541,16 @@ public void testNonexistantFields() throws Exception { cache.purgeAllCaches(); assertEquals(0, cache.getCacheEntries().length); - NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true); + NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true); assertEquals(0, ints.get(0)); - NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true); + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true); assertEquals(0, longs.get(0)); - NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true); + NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true); assertEquals(0, 
floats.get(0)); - NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true); + NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true); assertEquals(0, doubles.get(0)); BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true); @@ -584,7 +566,7 @@ public void testNonexistantFields() throws Exception { sortedSet.setDocument(0); assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd()); - Bits bits = cache.getDocsWithField(ar, "bogusbits"); + Bits bits = cache.getDocsWithField(ar, "bogusbits", null); assertFalse(bits.get(0)); // check that we cached nothing @@ -617,16 +599,16 @@ public void testNonIndexedFields() throws Exception { cache.purgeAllCaches(); assertEquals(0, cache.getCacheEntries().length); - NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true); + NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true); assertEquals(0, ints.get(0)); - NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true); + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true); assertEquals(0, longs.get(0)); - NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true); + NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true); assertEquals(0, floats.get(0)); - NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true); + NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true); assertEquals(0, doubles.get(0)); BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true); @@ -642,7 +624,7 @@ public void testNonIndexedFields() throws Exception { sortedSet.setDocument(0); 
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd()); - Bits bits = cache.getDocsWithField(ar, "bogusbits"); + Bits bits = cache.getDocsWithField(ar, "bogusbits", null); assertFalse(bits.get(0)); // check that we cached nothing @@ -658,8 +640,10 @@ public void testLongFieldCache() throws IOException { cfg.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); Document doc = new Document(); - LegacyLongField field = new LegacyLongField("f", 0L, Store.YES); + LongPoint field = new LongPoint("f", 0L); + StoredField field2 = new StoredField("f", 0L); doc.add(field); + doc.add(field2); final long[] values = new long[TestUtil.nextInt(random(), 1, 10)]; for (int i = 0; i < values.length; ++i) { final long v; @@ -683,12 +667,13 @@ public void testLongFieldCache() throws IOException { iw.addDocument(new Document()); } else { field.setLongValue(v); + field2.setLongValue(v); iw.addDocument(doc); } } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_LONG_PARSER, false); + final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LONG_POINT_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], longs.get(i)); } @@ -704,7 +689,7 @@ public void testIntFieldCache() throws IOException { cfg.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); Document doc = new Document(); - LegacyIntField field = new LegacyIntField("f", 0, Store.YES); + IntPoint field = new IntPoint("f", 0); doc.add(field); final int[] values = new int[TestUtil.nextInt(random(), 1, 10)]; for (int i = 0; i < values.length; ++i) { @@ -734,7 +719,7 @@ public void testIntFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues 
ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.INT_POINT_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], ints.get(i)); } diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java index a85731ff4d01..0d5584e4544e 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java @@ -18,8 +18,7 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyIntField; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -42,14 +41,14 @@ public void testFieldCacheReuseAfterReopen() throws Exception { setMergePolicy(newLogMergePolicy(10)) ); Document doc = new Document(); - doc.add(new LegacyIntField("number", 17, Field.Store.NO)); + doc.add(new IntPoint("number", 17)); writer.addDocument(doc); writer.commit(); // Open reader1 DirectoryReader r = DirectoryReader.open(dir); LeafReader r1 = getOnlySegmentReader(r); - final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER, false); assertEquals(17, ints.get(0)); // Add new segment @@ -61,7 +60,7 @@ public void testFieldCacheReuseAfterReopen() throws Exception { assertNotNull(r2); r.close(); LeafReader sub0 = r2.leaves().get(0).reader(); - final NumericDocValues ints2 = 
FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.INT_POINT_PARSER, false); r2.close(); assertTrue(ints == ints2); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java index f7dc0489ca71..f5c62e291c3e 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java @@ -94,11 +94,11 @@ public void testSanity() throws IOException { FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); - cache.getNumerics(readerA, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false); - cache.getNumerics(readerAclone, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false); - cache.getNumerics(readerB, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false); + cache.getNumerics(readerA, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false); + cache.getNumerics(readerAclone, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false); + cache.getNumerics(readerB, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false); - cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false); // // // @@ -117,7 +117,7 @@ public void testInsanity1() throws IOException { FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); - cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false); + cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false); cache.getTerms(readerX, "theInt", false); // // // diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java index 
717d36424e5e..f46bdde0c93d 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java @@ -23,11 +23,16 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.LegacyDoubleField; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LegacyFloatField; import org.apache.lucene.document.LegacyIntField; import org.apache.lucene.document.LegacyLongField; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -446,6 +451,140 @@ public void testFieldScoreReverse() throws Exception { /** Tests sorting on type int */ public void testInt() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new IntPoint("value", 300000)); + doc.add(new StoredField("value", 300000)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.INTEGER_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.INT)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // numeric order + assertEquals("-1", 
searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type int with a missing value */ + public void testIntMissing() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.INTEGER_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.INT)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as a 0 + assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */ + public void testIntMissingLast() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", 4)); + doc.add(new StoredField("value", 4)); + 
writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.INTEGER_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + SortField sortField = new SortField("value", SortField.Type.INT); + sortField.setMissingValue(Integer.MAX_VALUE); + Sort sort = new Sort(sortField); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as a Integer.MAX_VALUE + assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type int in reverse */ + public void testIntReverse() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new IntPoint("value", 300000)); + doc.add(new StoredField("value", 300000)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new IntPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.INTEGER_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.INT, true)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // reverse numeric order + assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value")); + 
TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type legacy int */ + public void testLegacyInt() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -458,7 +597,7 @@ public void testInt() throws IOException { doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER)); + Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -475,8 +614,8 @@ public void testInt() throws IOException { dir.close(); } - /** Tests sorting on type int with a missing value */ - public void testIntMissing() throws IOException { + /** Tests sorting on type legacy int with a missing value */ + public void testLegacyIntMissing() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -488,7 +627,7 @@ public void testIntMissing() throws IOException { doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER)); + Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -505,8 +644,8 @@ public void testIntMissing() throws IOException { dir.close(); } - /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */ - public void testIntMissingLast() throws IOException { + /** Tests sorting on type legacy int, specifying the missing value should be treated as Integer.MAX_VALUE */ + public void testLegacyIntMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document 
doc = new Document(); @@ -518,7 +657,7 @@ public void testIntMissingLast() throws IOException { doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER)); + Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -537,8 +676,8 @@ public void testIntMissingLast() throws IOException { dir.close(); } - /** Tests sorting on type int in reverse */ - public void testIntReverse() throws IOException { + /** Tests sorting on type legacy int in reverse */ + public void testLegacyIntReverse() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -551,7 +690,7 @@ public void testIntReverse() throws IOException { doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER)); + Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -570,6 +709,140 @@ public void testIntReverse() throws IOException { /** Tests sorting on type long */ public void testLong() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new LongPoint("value", 3000000000L)); + doc.add(new StoredField("value", 3000000000L)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", 
Type.LONG_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // numeric order + assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("3000000000", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type long with a missing value */ + public void testLongMissing() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.LONG_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as 0 + assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */ + public void testLongMissingLast() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); 
+ Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.LONG_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + SortField sortField = new SortField("value", SortField.Type.LONG); + sortField.setMissingValue(Long.MAX_VALUE); + Sort sort = new Sort(sortField); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as Long.MAX_VALUE + assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type long in reverse */ + public void testLongReverse() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new LongPoint("value", 3000000000L)); + doc.add(new StoredField("value", 3000000000L)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", -1)); + doc.add(new StoredField("value", -1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new LongPoint("value", 4)); + doc.add(new StoredField("value", 4)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.LONG_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true)); + + TopDocs td = searcher.search(new 
MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // reverse numeric order + assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type legacy long */ + public void testLegacyLong() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -582,7 +855,7 @@ public void testLong() throws IOException { doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG)); + Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -599,8 +872,8 @@ public void testLong() throws IOException { dir.close(); } - /** Tests sorting on type long with a missing value */ - public void testLongMissing() throws IOException { + /** Tests sorting on type legacy long with a missing value */ + public void testLegacyLongMissing() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -612,7 +885,7 @@ public void testLongMissing() throws IOException { doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG)); + Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -629,8 +902,8 @@ public void testLongMissing() throws IOException { dir.close(); } - /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */ - public void 
testLongMissingLast() throws IOException { + /** Tests sorting on type legacy long, specifying the missing value should be treated as Long.MAX_VALUE */ + public void testLegacyLongMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -642,7 +915,7 @@ public void testLongMissingLast() throws IOException { doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG)); + Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -661,8 +934,8 @@ public void testLongMissingLast() throws IOException { dir.close(); } - /** Tests sorting on type long in reverse */ - public void testLongReverse() throws IOException { + /** Tests sorting on type legacy long in reverse */ + public void testLegacyLongReverse() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -675,7 +948,7 @@ public void testLongReverse() throws IOException { doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG)); + Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -694,6 +967,140 @@ public void testLongReverse() throws IOException { /** Tests sorting on type float */ public void testFloat() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new FloatPoint("value", 30.1f)); + doc.add(new StoredField("value", 30.1f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", 
-1.3f)); + doc.add(new StoredField("value", -1.3f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", 4.2f)); + doc.add(new StoredField("value", 4.2f)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.FLOAT_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // numeric order + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("30.1", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type float with a missing value */ + public void testFloatMissing() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", -1.3f)); + doc.add(new StoredField("value", -1.3f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", 4.2f)); + doc.add(new StoredField("value", 4.2f)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.FLOAT_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as 0 + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4.2", 
searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */ + public void testFloatMissingLast() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", -1.3f)); + doc.add(new StoredField("value", -1.3f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", 4.2f)); + doc.add(new StoredField("value", 4.2f)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.FLOAT_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + SortField sortField = new SortField("value", SortField.Type.FLOAT); + sortField.setMissingValue(Float.MAX_VALUE); + Sort sort = new Sort(sortField); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // null is treated as Float.MAX_VALUE + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type float in reverse */ + public void testFloatReverse() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new FloatPoint("value", 30.1f)); + doc.add(new StoredField("value", 30.1f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new FloatPoint("value", -1.3f)); + doc.add(new StoredField("value", -1.3f)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new 
FloatPoint("value", 4.2f)); + doc.add(new StoredField("value", 4.2f)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.FLOAT_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(3, td.totalHits); + // reverse numeric order + assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("-1.3", searcher.doc(td.scoreDocs[2].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type legacy float */ + public void testLegacyFloat() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -706,7 +1113,7 @@ public void testFloat() throws IOException { doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT)); + Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -723,8 +1130,8 @@ public void testFloat() throws IOException { dir.close(); } - /** Tests sorting on type float with a missing value */ - public void testFloatMissing() throws IOException { + /** Tests sorting on type legacy float with a missing value */ + public void testLegacyFloatMissing() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -736,7 +1143,7 @@ public void testFloatMissing() throws IOException { doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = 
UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT)); + Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -753,8 +1160,8 @@ public void testFloatMissing() throws IOException { dir.close(); } - /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */ - public void testFloatMissingLast() throws IOException { + /** Tests sorting on type legacy float, specifying the missing value should be treated as Float.MAX_VALUE */ + public void testLegacyFloatMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -766,7 +1173,7 @@ public void testFloatMissingLast() throws IOException { doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT)); + Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -785,8 +1192,8 @@ public void testFloatMissingLast() throws IOException { dir.close(); } - /** Tests sorting on type float in reverse */ - public void testFloatReverse() throws IOException { + /** Tests sorting on type legacy float in reverse */ + public void testLegacyFloatReverse() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -799,7 +1206,7 @@ public void testFloatReverse() throws IOException { doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT)); + Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -818,6 +1225,195 @@ 
public void testFloatReverse() throws IOException { /** Tests sorting on type double */ public void testDouble() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new DoublePoint("value", 30.1)); + doc.add(new StoredField("value", 30.1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", -1.3)); + doc.add(new StoredField("value", -1.3)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333333)); + doc.add(new StoredField("value", 4.2333333333333)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333332)); + doc.add(new StoredField("value", 4.2333333333332)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.DOUBLE_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(4, td.totalHits); + // numeric order + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value")); + assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type double with +/- zero */ + public void testDoubleSignedZero() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new DoublePoint("value", +0d)); + doc.add(new StoredField("value", +0d)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new 
DoublePoint("value", -0d)); + doc.add(new StoredField("value", -0d)); + writer.addDocument(doc); + doc = new Document(); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.DOUBLE_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(2, td.totalHits); + // numeric order + double v0 = searcher.doc(td.scoreDocs[0].doc).getField("value").numericValue().doubleValue(); + double v1 = searcher.doc(td.scoreDocs[1].doc).getField("value").numericValue().doubleValue(); + assertEquals(0, v0, 0d); + assertEquals(0, v1, 0d); + // check sign bits + assertEquals(1, Double.doubleToLongBits(v0) >>> 63); + assertEquals(0, Double.doubleToLongBits(v1) >>> 63); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type double with a missing value */ + public void testDoubleMissing() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", -1.3)); + doc.add(new StoredField("value", -1.3)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333333)); + doc.add(new StoredField("value", 4.2333333333333)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333332)); + doc.add(new StoredField("value", 4.2333333333332)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.DOUBLE_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, 
sort); + assertEquals(4, td.totalHits); + // null treated as a 0 + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value")); + assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[3].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */ + public void testDoubleMissingLast() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", -1.3)); + doc.add(new StoredField("value", -1.3)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333333)); + doc.add(new StoredField("value", 4.2333333333333)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333332)); + doc.add(new StoredField("value", 4.2333333333332)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.DOUBLE_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + SortField sortField = new SortField("value", SortField.Type.DOUBLE); + sortField.setMissingValue(Double.MAX_VALUE); + Sort sort = new Sort(sortField); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(4, td.totalHits); + // null treated as Double.MAX_VALUE + assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value")); + assertNull(searcher.doc(td.scoreDocs[3].doc).get("value")); + 
TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type double in reverse */ + public void testDoubleReverse() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new DoublePoint("value", 30.1)); + doc.add(new StoredField("value", 30.1)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", -1.3)); + doc.add(new StoredField("value", -1.3)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333333)); + doc.add(new StoredField("value", 4.2333333333333)); + writer.addDocument(doc); + doc = new Document(); + doc.add(new DoublePoint("value", 4.2333333333332)); + doc.add(new StoredField("value", 4.2333333333332)); + writer.addDocument(doc); + IndexReader ir = UninvertingReader.wrap(writer.getReader(), + Collections.singletonMap("value", Type.DOUBLE_POINT)); + writer.close(); + + IndexSearcher searcher = newSearcher(ir, false); + Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true)); + + TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); + assertEquals(4, td.totalHits); + // numeric order + assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value")); + assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value")); + assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value")); + assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value")); + TestUtil.checkReader(ir); + ir.close(); + dir.close(); + } + + /** Tests sorting on type legacy double */ + public void testLegacyDouble() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -833,7 +1429,7 @@ public void testDouble() throws IOException { doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); 
writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE)); + Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -851,8 +1447,8 @@ public void testDouble() throws IOException { dir.close(); } - /** Tests sorting on type double with +/- zero */ - public void testDoubleSignedZero() throws IOException { + /** Tests sorting on type legacy double with +/- zero */ + public void testLegacyDoubleSignedZero() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -863,7 +1459,7 @@ public void testDoubleSignedZero() throws IOException { writer.addDocument(doc); doc = new Document(); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE)); + Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -884,8 +1480,8 @@ public void testDoubleSignedZero() throws IOException { dir.close(); } - /** Tests sorting on type double with a missing value */ - public void testDoubleMissing() throws IOException { + /** Tests sorting on type legacy double with a missing value */ + public void testLegacyDoubleMissing() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -900,7 +1496,7 @@ public void testDoubleMissing() throws IOException { doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE)); + Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -918,8 +1514,8 @@ public void testDoubleMissing() throws IOException { dir.close(); } - 
/** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */ - public void testDoubleMissingLast() throws IOException { + /** Tests sorting on type legacy double, specifying the missing value should be treated as Double.MAX_VALUE */ + public void testLegacyDoubleMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -934,7 +1530,7 @@ public void testDoubleMissingLast() throws IOException { doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE)); + Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -954,8 +1550,8 @@ public void testDoubleMissingLast() throws IOException { dir.close(); } - /** Tests sorting on type double in reverse */ - public void testDoubleReverse() throws IOException { + /** Tests sorting on type legacy double in reverse */ + public void testLegacyDoubleReverse() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); @@ -971,7 +1567,7 @@ public void testDoubleReverse() throws IOException { doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE)); + Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); IndexSearcher searcher = newSearcher(ir); @@ -1062,7 +1658,7 @@ public void testMaxScore() throws Exception { } IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w), - Collections.singletonMap("id", Type.INTEGER)); + Collections.singletonMap("id", Type.LEGACY_INTEGER)); w.close(); Query q = new TermQuery(new 
Term("body", "text")); IndexSearcher s = newSearcher(r); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java index 0b6292d3c65c..f3bd455e6915 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java @@ -30,7 +30,8 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.LegacyIntField; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexReader; @@ -118,7 +119,8 @@ private void testRandomStringSort(SortField.Type type) throws Exception { docValues.add(null); } - doc.add(new LegacyIntField("id", numDocs, Field.Store.YES)); + doc.add(new IntPoint("id", numDocs)); + doc.add(new StoredField("id", numDocs)); writer.addDocument(doc); numDocs++; @@ -130,7 +132,7 @@ private void testRandomStringSort(SortField.Type type) throws Exception { Map mapping = new HashMap<>(); mapping.put("stringdv", Type.SORTED); - mapping.put("id", Type.INTEGER); + mapping.put("id", Type.INTEGER_POINT); final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping); writer.close(); if (VERBOSE) { diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java index 1b1452289cc8..23b7d0c7a912 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java @@ -458,8 +458,8 @@ private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { DirectoryReader ir = 
DirectoryReader.open(dir); for (LeafReaderContext context : ir.leaves()) { LeafReader r = context.reader(); - Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed"); - Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv"); + Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed", null); + Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv", null); assertEquals(expected, actual); } ir.close(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java index 9b05ee112015..e716419de7ca 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java @@ -42,6 +42,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; +// TODO: what happened to this test... its not actually uninverting? public class TestFieldCacheWithThreads extends LuceneTestCase { public void test() throws Exception { @@ -83,7 +84,7 @@ public void test() throws Exception { public void run() { try { //NumericDocValues ndv = ar.getNumericDocValues("number"); - NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false); + NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false); //BinaryDocValues bdv = ar.getBinaryDocValues("bytes"); BinaryDocValues bdv = FieldCache.DEFAULT.getTerms(ar, "bytes", false); SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted"); @@ -93,16 +94,16 @@ public void run() { int docID = threadRandom.nextInt(numDocs); switch(threadRandom.nextInt(4)) { case 0: - assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false).get(docID)); + assertEquals(numbers.get(docID).longValue(), 
FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.INT_POINT_PARSER, false).get(docID)); break; case 1: - assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false).get(docID)); + assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false).get(docID)); break; case 2: - assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false).get(docID)); + assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.FLOAT_POINT_PARSER, false).get(docID)); break; case 3: - assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID)); + assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.DOUBLE_POINT_PARSER, false).get(docID)); break; } BytesRef term = bdv.get(docID); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java new file mode 100644 index 000000000000..c4ef1c4f4b76 --- /dev/null +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java @@ -0,0 +1,498 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.uninverting; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; + +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LegacyDoubleField; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.LegacyFloatField; +import org.apache.lucene.document.LegacyIntField; +import org.apache.lucene.document.LegacyLongField; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; +import 
org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.LegacyNumericUtils; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +/** random assortment of tests against legacy numerics */ +public class TestLegacyFieldCache extends LuceneTestCase { + private static LeafReader reader; + private static int NUM_DOCS; + private static Directory directory; + + @BeforeClass + public static void beforeClass() throws Exception { + NUM_DOCS = atLeast(500); + directory = newDirectory(); + RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + long theLong = Long.MAX_VALUE; + double theDouble = Double.MAX_VALUE; + int theInt = Integer.MAX_VALUE; + float theFloat = Float.MAX_VALUE; + if (VERBOSE) { + System.out.println("TEST: setUp"); + } + for (int i = 0; i < NUM_DOCS; i++){ + Document doc = new Document(); + doc.add(new LegacyLongField("theLong", theLong--, Field.Store.NO)); + doc.add(new LegacyDoubleField("theDouble", theDouble--, Field.Store.NO)); + doc.add(new LegacyIntField("theInt", theInt--, Field.Store.NO)); + doc.add(new LegacyFloatField("theFloat", theFloat--, Field.Store.NO)); + if (i%2 == 0) { + doc.add(new LegacyIntField("sparse", i, Field.Store.NO)); + } + + if (i%2 == 0) { + doc.add(new LegacyIntField("numInt", i, Field.Store.NO)); + } + writer.addDocument(doc); + } + IndexReader r = writer.getReader(); + reader = SlowCompositeReaderWrapper.wrap(r); + TestUtil.checkReader(reader); + writer.close(); + } + + @AfterClass + public static void afterClass() throws Exception { + reader.close(); + reader = null; + directory.close(); + directory = null; + } + + public void testInfoStream() throws Exception { + try { + FieldCache cache = FieldCache.DEFAULT; + ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); + cache.setInfoStream(new PrintStream(bos, false, 
IOUtils.UTF_8)); + cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false); + cache.getNumerics(reader, "theDouble", new FieldCache.Parser() { + @Override + public TermsEnum termsEnum(Terms terms) throws IOException { + return LegacyNumericUtils.filterPrefixCodedLongs(terms.iterator()); + } + @Override + public long parseValue(BytesRef term) { + int val = (int) LegacyNumericUtils.prefixCodedToLong(term); + if (val<0) val ^= 0x7fffffff; + return val; + } + }, false); + assertTrue(bos.toString(IOUtils.UTF_8).indexOf("WARNING") != -1); + } finally { + FieldCache.DEFAULT.setInfoStream(null); + FieldCache.DEFAULT.purgeAllCaches(); + } + } + + public void test() throws IOException { + FieldCache cache = FieldCache.DEFAULT; + NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean()); + assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean())); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.get(i)); + } + + NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean()); + assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean())); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(Long.MAX_VALUE - i, longs.get(i)); + } + + NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean()); + assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean())); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(Integer.MAX_VALUE - i, ints.get(i)); + } + + NumericDocValues floats = cache.getNumerics(reader, "theFloat", 
FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean()); + assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean())); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.get(i)); + } + + Bits docsWithField = cache.getDocsWithField(reader, "theLong", null); + assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong", null)); + assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits); + assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS); + for (int i = 0; i < docsWithField.length(); i++) { + assertTrue(docsWithField.get(i)); + } + + docsWithField = cache.getDocsWithField(reader, "sparse", null); + assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse", null)); + assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits); + assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS); + for (int i = 0; i < docsWithField.length(); i++) { + assertEquals(i%2 == 0, docsWithField.get(i)); + } + + FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey()); + } + + public void testEmptyIndex() throws Exception { + Directory dir = newDirectory(); + IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500)); + writer.close(); + IndexReader r = DirectoryReader.open(dir); + LeafReader reader = SlowCompositeReaderWrapper.wrap(r); + TestUtil.checkReader(reader); + FieldCache.DEFAULT.getTerms(reader, "foobar", true); + FieldCache.DEFAULT.getTermsIndex(reader, "foobar"); + 
FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey()); + r.close(); + dir.close(); + } + + public void testDocsWithField() throws Exception { + FieldCache cache = FieldCache.DEFAULT; + cache.purgeAllCaches(); + assertEquals(0, cache.getCacheEntries().length); + cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, true); + + // The double[] takes one slots, and docsWithField should also + // have been populated: + assertEquals(2, cache.getCacheEntries().length); + Bits bits = cache.getDocsWithField(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER); + + // No new entries should appear: + assertEquals(2, cache.getCacheEntries().length); + assertTrue(bits instanceof Bits.MatchAllBits); + + NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true); + assertEquals(4, cache.getCacheEntries().length); + Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.LEGACY_INT_PARSER); + assertEquals(4, cache.getCacheEntries().length); + for (int i = 0; i < docsWithField.length(); i++) { + if (i%2 == 0) { + assertTrue(docsWithField.get(i)); + assertEquals(i, ints.get(i)); + } else { + assertFalse(docsWithField.get(i)); + } + } + + NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean()); + docsWithField = cache.getDocsWithField(reader, "numInt", FieldCache.LEGACY_INT_PARSER); + for (int i = 0; i < docsWithField.length(); i++) { + if (i%2 == 0) { + assertTrue(docsWithField.get(i)); + assertEquals(i, numInts.get(i)); + } else { + assertFalse(docsWithField.get(i)); + } + } + } + + public void testGetDocsWithFieldThreadSafety() throws Exception { + final FieldCache cache = FieldCache.DEFAULT; + cache.purgeAllCaches(); + + int NUM_THREADS = 3; + Thread[] threads = new Thread[NUM_THREADS]; + final AtomicBoolean failed = new AtomicBoolean(); + final AtomicInteger iters = new AtomicInteger(); + final int NUM_ITER = 200 * RANDOM_MULTIPLIER; + 
final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS, + new Runnable() { + @Override + public void run() { + cache.purgeAllCaches(); + iters.incrementAndGet(); + } + }); + for(int threadIDX=0;threadIDX= NUM_ITER) { + break; + } + } else if (op == 1) { + Bits docsWithField = cache.getDocsWithField(reader, "sparse", null); + for (int i = 0; i < docsWithField.length(); i++) { + assertEquals(i%2 == 0, docsWithField.get(i)); + } + } else { + NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true); + Bits docsWithField = cache.getDocsWithField(reader, "sparse", null); + for (int i = 0; i < docsWithField.length(); i++) { + if (i%2 == 0) { + assertTrue(docsWithField.get(i)); + assertEquals(i, ints.get(i)); + } else { + assertFalse(docsWithField.get(i)); + } + } + } + } + } catch (Throwable t) { + failed.set(true); + restart.reset(); + throw new RuntimeException(t); + } + } + }; + threads[threadIDX].start(); + } + + for(int threadIDX=0;threadIDX { + FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER, false); + }); + + // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds() + expectThrows(IllegalStateException.class, () -> { + FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER, false); + }); + + // Numeric type: can be retrieved via getInts() and so on + NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER, false); + assertEquals(42, numeric.get(0)); + + // SortedSet type: can be retrieved via getDocTermOrds() + expectThrows(IllegalStateException.class, () -> { + FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER, false); + }); + + ir.close(); + dir.close(); + } + + public void testNonexistantFields() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + iw.addDocument(doc); + 
DirectoryReader ir = iw.getReader(); + iw.close(); + + LeafReader ar = getOnlySegmentReader(ir); + + final FieldCache cache = FieldCache.DEFAULT; + cache.purgeAllCaches(); + assertEquals(0, cache.getCacheEntries().length); + + NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true); + assertEquals(0, ints.get(0)); + + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true); + assertEquals(0, longs.get(0)); + + NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true); + assertEquals(0, floats.get(0)); + + NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true); + assertEquals(0, doubles.get(0)); + + // check that we cached nothing + assertEquals(0, cache.getCacheEntries().length); + ir.close(); + dir.close(); + } + + public void testNonIndexedFields() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(new StoredField("bogusbytes", "bogus")); + doc.add(new StoredField("bogusshorts", "bogus")); + doc.add(new StoredField("bogusints", "bogus")); + doc.add(new StoredField("boguslongs", "bogus")); + doc.add(new StoredField("bogusfloats", "bogus")); + doc.add(new StoredField("bogusdoubles", "bogus")); + doc.add(new StoredField("bogusbits", "bogus")); + iw.addDocument(doc); + DirectoryReader ir = iw.getReader(); + iw.close(); + + LeafReader ar = getOnlySegmentReader(ir); + + final FieldCache cache = FieldCache.DEFAULT; + cache.purgeAllCaches(); + assertEquals(0, cache.getCacheEntries().length); + + NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true); + assertEquals(0, ints.get(0)); + + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true); + assertEquals(0, longs.get(0)); + + NumericDocValues floats = 
cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true); + assertEquals(0, floats.get(0)); + + NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true); + assertEquals(0, doubles.get(0)); + + // check that we cached nothing + assertEquals(0, cache.getCacheEntries().length); + ir.close(); + dir.close(); + } + + // Make sure that the use of GrowableWriter doesn't prevent from using the full long range + public void testLongFieldCache() throws IOException { + Directory dir = newDirectory(); + IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random())); + cfg.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); + Document doc = new Document(); + LegacyLongField field = new LegacyLongField("f", 0L, Store.YES); + doc.add(field); + final long[] values = new long[TestUtil.nextInt(random(), 1, 10)]; + for (int i = 0; i < values.length; ++i) { + final long v; + switch (random().nextInt(10)) { + case 0: + v = Long.MIN_VALUE; + break; + case 1: + v = 0; + break; + case 2: + v = Long.MAX_VALUE; + break; + default: + v = TestUtil.nextLong(random(), -10, 10); + break; + } + values[i] = v; + if (v == 0 && random().nextBoolean()) { + // missing + iw.addDocument(new Document()); + } else { + field.setLongValue(v); + iw.addDocument(doc); + } + } + iw.forceMerge(1); + final DirectoryReader reader = iw.getReader(); + final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false); + for (int i = 0; i < values.length; ++i) { + assertEquals(values[i], longs.get(i)); + } + reader.close(); + iw.close(); + dir.close(); + } + + // Make sure that the use of GrowableWriter doesn't prevent from using the full int range + public void testIntFieldCache() throws IOException { + Directory dir = newDirectory(); + IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random())); + 
cfg.setMergePolicy(newLogMergePolicy()); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); + Document doc = new Document(); + LegacyIntField field = new LegacyIntField("f", 0, Store.YES); + doc.add(field); + final int[] values = new int[TestUtil.nextInt(random(), 1, 10)]; + for (int i = 0; i < values.length; ++i) { + final int v; + switch (random().nextInt(10)) { + case 0: + v = Integer.MIN_VALUE; + break; + case 1: + v = 0; + break; + case 2: + v = Integer.MAX_VALUE; + break; + default: + v = TestUtil.nextInt(random(), -10, 10); + break; + } + values[i] = v; + if (v == 0 && random().nextBoolean()) { + // missing + iw.addDocument(new Document()); + } else { + field.setIntValue(v); + iw.addDocument(doc); + } + } + iw.forceMerge(1); + final DirectoryReader reader = iw.getReader(); + final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false); + for (int i = 0; i < values.length; ++i) { + assertEquals(values[i], ints.get(i)); + } + reader.close(); + iw.close(); + dir.close(); + } + +} diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java index bc85db4b3713..a0cddf88c6b4 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java @@ -96,9 +96,9 @@ public static void beforeClass() throws Exception { } Map map = new HashMap<>(); - map.put("field2", Type.INTEGER); - map.put("field4", Type.INTEGER); - map.put("field8", Type.INTEGER); + map.put("field2", Type.LEGACY_INTEGER); + map.put("field4", Type.LEGACY_INTEGER); + map.put("field8", Type.LEGACY_INTEGER); reader = UninvertingReader.wrap(writer.getReader(), map); searcher=newSearcher(reader); writer.close(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java 
b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java index d9fcc92eeddf..0724d86feeb6 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java @@ -100,10 +100,10 @@ public static void beforeClass() throws Exception { writer.addDocument(doc); } Map map = new HashMap<>(); - map.put("field2", Type.LONG); - map.put("field4", Type.LONG); - map.put("field6", Type.LONG); - map.put("field8", Type.LONG); + map.put("field2", Type.LEGACY_LONG); + map.put("field4", Type.LEGACY_LONG); + map.put("field6", Type.LEGACY_LONG); + map.put("field8", Type.LEGACY_LONG); reader = UninvertingReader.wrap(writer.getReader(), map); searcher=newSearcher(reader); writer.close(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java index 99df329284d6..0a1cf3d40488 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java @@ -363,8 +363,9 @@ public void testFieldInfos() throws IOException { iw.close(); Map uninvertingMap = new HashMap<>(); - uninvertingMap.put("int", Type.INTEGER); - uninvertingMap.put("dv", Type.INTEGER); + uninvertingMap.put("int", Type.LEGACY_INTEGER); + uninvertingMap.put("dv", Type.LEGACY_INTEGER); + uninvertingMap.put("dint", Type.INTEGER_POINT); DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), uninvertingMap); @@ -376,6 +377,7 @@ public void testFieldInfos() throws IOException { assertEquals(0, intFInfo.getPointNumBytes()); FieldInfo dintFInfo = leafReader.getFieldInfos().fieldInfo("dint"); + assertEquals(DocValuesType.NUMERIC, dintFInfo.getDocValuesType()); assertEquals(1, dintFInfo.getPointDimensionCount()); assertEquals(4, dintFInfo.getPointNumBytes()); diff --git 
a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java index 529e98bc5587..8ccb9afdbe56 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java @@ -73,8 +73,8 @@ public void setUp() throws Exception { super.setUp(); // TODO: change this module to index docvalues instead of uninverting uninvertMap.clear(); - uninvertMap.put("pointvector__x", Type.DOUBLE); - uninvertMap.put("pointvector__y", Type.DOUBLE); + uninvertMap.put("pointvector__x", Type.LEGACY_DOUBLE); + uninvertMap.put("pointvector__y", Type.LEGACY_DOUBLE); directory = newDirectory(); final Random random = random(); diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java index e1fb42057beb..cbf1d4eb9e8d 100644 --- a/solr/core/src/java/org/apache/solr/schema/EnumField.java +++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java @@ -194,7 +194,7 @@ public Type getUninversionType(SchemaField sf) { if (sf.multiValued()) { return Type.SORTED_SET_INTEGER; } else { - return Type.INTEGER; + return Type.LEGACY_INTEGER; } } diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java index 572bf8884704..c4899a106045 100644 --- a/solr/core/src/java/org/apache/solr/schema/TrieField.java +++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java @@ -203,14 +203,14 @@ public Type getUninversionType(SchemaField sf) { } else { switch (type) { case INTEGER: - return Type.INTEGER; + return Type.LEGACY_INTEGER; case LONG: case DATE: - return Type.LONG; + return Type.LEGACY_LONG; case FLOAT: - return Type.FLOAT; + return Type.LEGACY_FLOAT; case DOUBLE: - return Type.DOUBLE; + return Type.LEGACY_DOUBLE; default: throw new AssertionError(); } From 
6689e1c55a7efffadb18d25179d9d87c121af5d0 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Fri, 11 Mar 2016 00:49:49 +0530 Subject: [PATCH 0094/1113] SOLR-8135: SolrCloudExampleTest.testLoadDocsIntoGettingStartedCollection reproducible failure --- .../src/java/org/apache/solr/core/CoreContainer.java | 6 ++++-- solr/core/src/java/org/apache/solr/core/SolrCore.java | 10 +++++++--- .../src/java/org/apache/solr/update/SolrCoreState.java | 8 +++++++- .../org/apache/solr/cloud/SolrCloudExampleTest.java | 1 - 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 1d614e343dca..c140fb4dba8e 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -62,6 +62,7 @@ import org.apache.solr.security.HttpClientInterceptorPlugin; import org.apache.solr.security.PKIAuthenticationPlugin; import org.apache.solr.security.SecurityPluginHolder; +import org.apache.solr.update.SolrCoreState; import org.apache.solr.update.UpdateShardHandler; import org.apache.solr.util.DefaultSolrThreadFactory; import org.apache.zookeeper.KeeperException; @@ -916,8 +917,9 @@ public void reload(String name) { log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName()); SolrCore newCore = core.reload(coreConfig); registerCore(name, newCore, false); - } - catch (Exception e) { + } catch (SolrCoreState.CoreIsClosedException e) { + throw e; + } catch (Exception e) { coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e)); throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e); } diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java index cde878a5ebf3..7a65a72decbb 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrCore.java +++ 
b/solr/core/src/java/org/apache/solr/core/SolrCore.java @@ -534,8 +534,8 @@ void initIndex(boolean reload) throws IOException { // Create the index if it doesn't exist. if(!indexExists) { - log.warn(logid+"Solr index directory '" + new File(indexDir) + "' doesn't exist." - + " Creating new index..."); + log.warn(logid + "Solr index directory '" + new File(indexDir) + "' doesn't exist." + + " Creating new index..."); SolrIndexWriter writer = SolrIndexWriter.create(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(), true, getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec); @@ -2501,7 +2501,11 @@ public static Runnable getConfListener(SolrCore core, ZkSolrResourceLoader zkSol checkStale(zkClient, solrConfigPath, overlayVersion) || checkStale(zkClient, managedSchmaResourcePath, managedSchemaVersion)) { log.info("core reload {}", coreName); - cc.reload(coreName); + try { + cc.reload(coreName); + } catch (SolrCoreState.CoreIsClosedException e) { + /*no problem this core is already closed*/ + } return; } //some files in conf directory may have other than managedschema, overlay, params diff --git a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java index 42727b4a64a4..fc0bca8a125c 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrCoreState.java +++ b/solr/core/src/java/org/apache/solr/update/SolrCoreState.java @@ -51,7 +51,7 @@ public Object getUpdateLock() { public void increfSolrCoreState() { synchronized (this) { if (solrCoreStateRefCnt == 0) { - throw new IllegalStateException("IndexWriter has been closed"); + throw new CoreIsClosedException("IndexWriter has been closed"); } solrCoreStateRefCnt++; } @@ -157,4 +157,10 @@ public interface IndexWriterCloser { public abstract boolean getLastReplicateIndexSuccess(); public abstract void setLastReplicateIndexSuccess(boolean success); + + public static class CoreIsClosedException extends IllegalStateException { + public 
CoreIsClosedException(String s) { + super(s); + } + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java index e889d90b3c72..f084c2073455 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java @@ -43,7 +43,6 @@ * this test is useful for catching regressions in indexing the example docs in collections that * use data-driven schema and managed schema features provided by configsets/data_driven_schema_configs. */ -@LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-8135") public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); From 96b059c9dd67eef3a49da63d388fac7f4d3809f2 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Fri, 11 Mar 2016 00:52:52 +0530 Subject: [PATCH 0095/1113] SOLR-8135: SolrCloudExampleTest.testLoadDocsIntoGettingStartedCollection reproducible failure --- solr/CHANGES.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index feb0e10dc2ea..0aca8e09c5af 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -283,6 +283,8 @@ Bug Fixes other than count, resulted in incorrect results. This has been fixed, and facet.prefix support for facet.method=uif has been enabled. 
(Mikhail Khludnev, yonik) +* SOLR-8135: If a core reload is fired after a core close, it is not a recoverable error (noble) + Optimizations ---------------------- * SOLR-7876: Speed up queries and operations that use many terms when timeAllowed has not been From 4a795ee371c1bb19285cca0fb7336807f6e2840f Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Fri, 11 Mar 2016 00:57:26 +0530 Subject: [PATCH 0096/1113] SOLR-8135: typo in comment --- solr/CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 0aca8e09c5af..a1b14d6afdc8 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -283,7 +283,7 @@ Bug Fixes other than count, resulted in incorrect results. This has been fixed, and facet.prefix support for facet.method=uif has been enabled. (Mikhail Khludnev, yonik) -* SOLR-8135: If a core reload is fired after a core close, it is not a recoverable error (noble) +* SOLR-8135: If a core reload is fired after a core close, it is not a non-recoverable error (noble) Optimizations ---------------------- From 9f48f53a086f809490e2b1e00cabe3cc29bf2b08 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Thu, 10 Mar 2016 16:34:04 -0500 Subject: [PATCH 0097/1113] fix rare-ish test bug --- .../apache/lucene/index/TestAllFilesCheckIndexHeader.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java index f6c1486bc433..c430281b117f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java @@ -28,6 +28,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import 
org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; import org.apache.lucene.util.LuceneTestCase; @@ -86,6 +87,12 @@ private void checkIndexHeader(Directory dir) throws IOException { private void checkOneFile(Directory dir, String victim) throws IOException { try (BaseDirectoryWrapper dirCopy = newDirectory()) { dirCopy.setCheckIndexOnClose(false); + + if (dirCopy instanceof MockDirectoryWrapper) { + // The while(true) loop below, under rarish circumstances, can sometimes double write: + ((MockDirectoryWrapper) dirCopy).setPreventDoubleWrite(false); + } + long victimLength = dir.fileLength(victim); int wrongBytes = TestUtil.nextInt(random(), 1, (int) Math.min(100, victimLength)); assert victimLength > 0; From 7927a3101e1f446939bfa311968346ddd57c29f3 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Thu, 10 Mar 2016 16:08:59 -0500 Subject: [PATCH 0098/1113] LUCENE-7086: move SlowCompositeReaderWrapper to misc module, and throw clear exc if you try to use in with points Squashed commits: commit e26b065c71388407bc6725256ca43d7bb30dee29 Author: Mike McCandless Date: Thu Mar 10 14:16:45 2016 -0500 simplify the checking for incoming points commit b7254376dcb398c7739aab4544118bb4526961d5 Merge: 8ec82a0 d35d569 Author: Mike McCandless Date: Thu Mar 10 14:05:24 2016 -0500 Merge branch 'master' into slow_wrapper Conflicts: lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java commit 8ec82a0d9a0dd946d96da20962bb2ea95758edbe Author: Mike McCandless Date: Thu Mar 10 13:56:25 2016 -0500 LUCENE-7086: move SlowCompositeReaderWrapper to misc module --- .../lucene50/TestLucene50DocValuesFormat.java | 2 +- .../ClassificationTestBase.java | 7 +- .../DocumentClassificationTestBase.java | 4 +- .../utils/DataSplitterTest.java | 5 +- .../apache/lucene/index/CompositeReader.java | 2 - .../org/apache/lucene/index/IndexReader.java | 2 - .../apache/lucene/index/MultiDocValues.java | 6 +- 
.../lucene54/TestLucene54DocValuesFormat.java | 6 +- .../index/TestBinaryDocValuesUpdates.java | 28 +++--- .../apache/lucene/index/TestCustomNorms.java | 4 +- .../index/TestDemoParallelLeafReader.java | 21 +---- .../lucene/index/TestDirectoryReader.java | 2 +- .../apache/lucene/index/TestDocValues.java | 14 +-- .../lucene/index/TestDocValuesIndexing.java | 25 +++-- .../lucene/index/TestDocsAndPositions.java | 4 +- .../lucene/index/TestDocumentWriter.java | 2 +- .../lucene/index/TestDuelingCodecs.java | 4 +- .../index/TestExitableDirectoryReader.java | 17 ++-- .../lucene/index/TestFilterLeafReader.java | 8 +- .../org/apache/lucene/index/TestFlex.java | 2 +- .../lucene/index/TestIndexReaderClose.java | 52 +---------- .../apache/lucene/index/TestIndexWriter.java | 6 +- .../lucene/index/TestLazyProxSkipping.java | 2 +- .../lucene/index/TestMultiDocValues.java | 16 ++-- .../lucene/index/TestMultiLevelSkipList.java | 2 +- .../org/apache/lucene/index/TestNorms.java | 4 +- .../index/TestNumericDocValuesUpdates.java | 30 +++--- .../apache/lucene/index/TestOmitNorms.java | 8 +- .../lucene/index/TestOmitPositions.java | 2 +- .../org/apache/lucene/index/TestOmitTf.java | 6 +- .../apache/lucene/index/TestOrdinalMap.java | 5 +- .../index/TestParallelCompositeReader.java | 34 +------ .../lucene/index/TestParallelLeafReader.java | 28 +++--- .../index/TestParallelReaderEmptyIndex.java | 22 ++--- .../lucene/index/TestParallelTermEnum.java | 4 +- .../org/apache/lucene/index/TestPayloads.java | 11 ++- .../apache/lucene/index/TestPointValues.java | 4 +- .../lucene/index/TestPostingsOffsets.java | 5 +- .../apache/lucene/index/TestReaderClosed.java | 6 +- .../index/TestReaderWrapperDVTypeCheck.java | 5 +- .../lucene/index/TestSegmentTermEnum.java | 2 +- .../lucene/index/TestStressAdvance.java | 2 +- .../apache/lucene/index/TestTermsEnum.java | 6 +- .../search/TestDisjunctionMaxQuery.java | 6 +- .../lucene/search/TestMinShouldMatch2.java | 2 +- .../lucene/search/TestMultiPhraseEnum.java | 
8 +- .../apache/lucene/search/TestPhraseQuery.java | 4 +- .../lucene/search/TestPositionIncrement.java | 10 +- .../lucene/search/TestSimilarityProvider.java | 11 +-- .../apache/lucene/search/TestTermScorer.java | 7 +- .../TestUsageTrackingFilterCachingPolicy.java | 17 +++- .../spans/TestFieldMaskingSpanQuery.java | 11 ++- .../search/spans/TestNearSpansOrdered.java | 29 +++--- .../search/spans/TestSpanCollection.java | 9 +- .../search/spans/TestSpanContainQuery.java | 7 +- .../apache/lucene/search/spans/TestSpans.java | 13 +-- .../DefaultSortedSetDocValuesReaderState.java | 55 +++++++++-- .../TestSortedSetDocValuesFacets.java | 36 -------- .../lucene/search/grouping/TestGrouping.java | 12 +-- .../lucene/search/join/TestJoinUtil.java | 8 +- .../memory/TestMemoryIndexAgainstRAMDir.java | 12 +-- .../index/SlowCompositeReaderWrapper.java | 4 + .../index/TestSlowCompositeReaderWrapper.java | 91 +++++++++++++++++++ .../lucene/uninverting/TestDocTermOrds.java | 12 +-- .../lucene/uninverting/TestFieldCache.java | 10 +- .../uninverting/TestFieldCacheReopen.java | 2 +- .../TestFieldCacheVsDocValues.java | 2 +- .../TestFieldCacheWithThreads.java | 2 +- .../uninverting/TestLegacyFieldCache.java | 10 +- .../lucene/queries/CommonTermsQueryTest.java | 6 +- .../apache/lucene/queries/TermsQueryTest.java | 4 +- .../function/TestSortedSetFieldSource.java | 2 +- .../queries/payloads/PayloadHelper.java | 3 +- .../queries/payloads/TestPayloadSpans.java | 39 ++++---- .../payloads/TestPayloadTermQuery.java | 11 ++- .../index/BaseDocValuesFormatTestCase.java | 64 +++++++------ .../index/BaseIndexFileFormatTestCase.java | 6 +- .../index/BasePointsFormatTestCase.java | 4 +- .../index/BasePostingsFormatTestCase.java | 66 +++++++------- .../index/BaseStoredFieldsFormatTestCase.java | 2 +- .../index/BaseTermVectorsFormatTestCase.java | 12 +-- .../org/apache/lucene/search/QueryUtils.java | 6 +- .../search/spans/MultiSpansWrapper.java | 51 ----------- .../apache/lucene/util/LuceneTestCase.java | 
53 +++++------ .../lucene/analysis/TestMockAnalyzer.java | 2 +- .../TestCompressingStoredFieldsFormat.java | 5 +- .../TestCompressingTermVectorsFormat.java | 8 +- .../lucene/index/TestAssertingLeafReader.java | 5 +- 88 files changed, 557 insertions(+), 609 deletions(-) rename lucene/{core => misc}/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java (98%) create mode 100644 lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java delete mode 100644 lucene/test-framework/src/java/org/apache/lucene/search/spans/MultiSpansWrapper.java diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java index 2c38728446ce..9f174dd23523 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java @@ -200,7 +200,7 @@ public DocValuesFormat getDocValuesFormatForField(String field) { // now compare again after the merge ir = writer.getReader(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); Terms terms = ar.terms("indexed"); if (terms != null) { assertEquals(terms.size(), ar.getSortedSetDocValues("dv").getValueCount()); diff --git a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java index 605b4905e801..331a74b70427 100644 --- a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java +++ b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java @@ -27,7 +27,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.RandomIndexWriter; 
-import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -190,7 +189,8 @@ protected LeafReader getSampleIndex(Analyzer analyzer) throws IOException { indexWriter.addDocument(doc); indexWriter.commit(); - return SlowCompositeReaderWrapper.wrap(indexWriter.getReader()); + indexWriter.forceMerge(1); + return getOnlyLeafReader(indexWriter.getReader()); } protected LeafReader getRandomIndex(Analyzer analyzer, int size) throws IOException { @@ -213,7 +213,8 @@ protected LeafReader getRandomIndex(Analyzer analyzer, int size) throws IOExcept indexWriter.addDocument(doc); } indexWriter.commit(); - return SlowCompositeReaderWrapper.wrap(indexWriter.getReader()); + indexWriter.forceMerge(1); + return getOnlyLeafReader(indexWriter.getReader()); } private String createRandomString(Random random) { diff --git a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java index 316802b0eee4..4193bde1679c 100644 --- a/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java +++ b/lucene/classification/src/test/org/apache/lucene/classification/document/DocumentClassificationTestBase.java @@ -30,7 +30,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.util.BytesRef; import org.junit.Before; @@ -202,7 +201,8 @@ protected LeafReader populateDocumentClassificationIndex(Analyzer analyzer) thro indexWriter.addDocument(doc); indexWriter.commit(); - return SlowCompositeReaderWrapper.wrap(indexWriter.getReader()); + indexWriter.forceMerge(1); + return 
getOnlyLeafReader(indexWriter.getReader()); } protected Document getVideoGameDocument() { diff --git a/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java b/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java index d69fefba1e84..2984bb55d450 100644 --- a/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java +++ b/lucene/classification/src/test/org/apache/lucene/classification/utils/DataSplitterTest.java @@ -27,7 +27,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.util.TestUtil; @@ -75,9 +74,9 @@ public void setUp() throws Exception { } indexWriter.commit(); + indexWriter.forceMerge(1); - originalIndex = SlowCompositeReaderWrapper.wrap(indexWriter.getReader()); - + originalIndex = getOnlyLeafReader(indexWriter.getReader()); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java b/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java index 0f6a44eb1e39..83bb92a82782 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/CompositeReader.java @@ -26,8 +26,6 @@ be used to get stored fields from the underlying LeafReaders, but it is not possible to directly retrieve postings. To do that, get the {@link LeafReaderContext} for all sub-readers via {@link #leaves()}. - Alternatively, you can mimic an {@link LeafReader} (with a serious slowdown), - by wrapping composite readers with {@link SlowCompositeReaderWrapper}.

      IndexReader instances for indexes on disk are usually constructed with a call to one of the static DirectoryReader.open() methods, diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java index 865f816bea20..976f548317bd 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexReader.java @@ -56,8 +56,6 @@ be used to get stored fields from the underlying LeafReaders, but it is not possible to directly retrieve postings. To do that, get the sub-readers via {@link CompositeReader#getSequentialSubReaders}. - Alternatively, you can mimic an {@link LeafReader} (with a serious slowdown), - by wrapping composite readers with {@link SlowCompositeReaderWrapper}.

    IndexReader instances for indexes on disk are usually constructed diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java index 383139f45f84..33947974bcee 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java @@ -486,8 +486,8 @@ public static OrdinalMap build(Object owner, TermsEnum subs[], long[] weights, f private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(OrdinalMap.class); - // cache key of whoever asked for this awful thing - final Object owner; + /** Cache key of whoever asked for this awful thing */ + public final Object owner; // globalOrd -> (globalOrd - segmentOrd) where segmentOrd is the the ordinal in the first segment that contains this term final PackedLongValues globalOrdDeltas; // globalOrd -> first segment container @@ -703,7 +703,7 @@ public static class MultiSortedSetDocValues extends SortedSetDocValues { LongValues currentGlobalOrds; /** Creates a new MultiSortedSetDocValues over values */ - MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException { + public MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException { assert docStarts.length == values.length + 1; this.values = values; this.docStarts = docStarts; diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java index cede1d7efc38..b6f178dc061e 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene54/TestLucene54DocValuesFormat.java @@ -362,7 +362,7 @@ public DocValuesFormat getDocValuesFormatForField(String field) { // now compare 
again after the merge ir = writer.getReader(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); Terms terms = ar.terms("indexed"); if (terms != null) { assertEquals(terms.size(), ar.getSortedSetDocValues("dv").getValueCount()); @@ -541,7 +541,7 @@ public void testSortedSetAroundBlockSize() throws IOException { w.forceMerge(1); DirectoryReader r = DirectoryReader.open(w); w.close(); - SegmentReader sr = getOnlySegmentReader(r); + LeafReader sr = getOnlyLeafReader(r); assertEquals(maxDoc, sr.maxDoc()); SortedSetDocValues values = sr.getSortedSetDocValues("sset"); assertNotNull(values); @@ -591,7 +591,7 @@ public void testSortedNumericAroundBlockSize() throws IOException { w.forceMerge(1); DirectoryReader r = DirectoryReader.open(w); w.close(); - SegmentReader sr = getOnlySegmentReader(r); + LeafReader sr = getOnlyLeafReader(r); assertEquals(maxDoc, sr.maxDoc()); SortedNumericDocValues values = sr.getSortedNumericDocValues("snum"); assertNotNull(values); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java index 4025f58bcbc1..8dc8a3c8a031 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java @@ -253,16 +253,14 @@ public void testUpdatesAndDeletes() throws Exception { writer.close(); } - LeafReader slow = SlowCompositeReaderWrapper.wrap(reader); - - Bits liveDocs = slow.getLiveDocs(); + Bits liveDocs = MultiFields.getLiveDocs(reader); boolean[] expectedLiveDocs = new boolean[] { true, false, false, true, true, true }; for (int i = 0; i < expectedLiveDocs.length; i++) { assertEquals(expectedLiveDocs[i], liveDocs.get(i)); } long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17}; - BinaryDocValues bdv = slow.getBinaryDocValues("val"); + BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "val"); 
for (int i = 0; i < expectedValues.length; i++) { assertEquals(expectedValues[i], getValue(bdv, i)); } @@ -469,10 +467,9 @@ public DocValuesFormat getDocValuesFormatForField(String field) { final DirectoryReader reader = DirectoryReader.open(dir); - LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - BinaryDocValues bdv = r.getBinaryDocValues("bdv"); - SortedDocValues sdv = r.getSortedDocValues("sorted"); - for (int i = 0; i < r.maxDoc(); i++) { + BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv"); + SortedDocValues sdv = MultiDocValues.getSortedValues(reader, "sorted"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(17, getValue(bdv, i)); BytesRef term = sdv.get(i); assertEquals(new BytesRef("value"), term); @@ -499,9 +496,8 @@ public void testUpdateSameDocMultipleTimes() throws Exception { writer.close(); final DirectoryReader reader = DirectoryReader.open(dir); - final LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - BinaryDocValues bdv = r.getBinaryDocValues("bdv"); - for (int i = 0; i < r.maxDoc(); i++) { + BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(3, getValue(bdv, i)); } reader.close(); @@ -598,9 +594,8 @@ public void testUpdateDocumentByMultipleTerms() throws Exception { writer.close(); final DirectoryReader reader = DirectoryReader.open(dir); - final LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - BinaryDocValues bdv = r.getBinaryDocValues("bdv"); - for (int i = 0; i < r.maxDoc(); i++) { + BinaryDocValues bdv = MultiDocValues.getBinaryValues(reader, "bdv"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(3, getValue(bdv, i)); } reader.close(); @@ -1018,9 +1013,8 @@ public DocValuesFormat getDocValuesFormatForField(String field) { writer.close(); DirectoryReader reader = DirectoryReader.open(dir); - LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - BinaryDocValues f1 = r.getBinaryDocValues("f1"); - 
BinaryDocValues f2 = r.getBinaryDocValues("f2"); + BinaryDocValues f1 = MultiDocValues.getBinaryValues(reader, "f1"); + BinaryDocValues f2 = MultiDocValues.getBinaryValues(reader, "f2"); assertEquals(12L, getValue(f1, 0)); assertEquals(13L, getValue(f2, 0)); assertEquals(17L, getValue(f1, 1)); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java index 1d0ba540045b..c513093c42dc 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java @@ -66,8 +66,8 @@ public void testFloatNorms() throws IOException { } writer.commit(); writer.close(); - LeafReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir)); - NumericDocValues norms = open.getNormValues(floatTestField); + DirectoryReader open = DirectoryReader.open(dir); + NumericDocValues norms = MultiDocValues.getNormValues(open, floatTestField); assertNotNull(norms); for (int i = 0; i < open.maxDoc(); i++) { Document document = open.document(i); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index 0034cee0c453..9f3339c8c692 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -405,13 +405,8 @@ LeafReader getParallelLeafReader(final LeafReader leaf, boolean doCache, long sc //TestUtil.checkIndex(dir); SegmentInfos infos = SegmentInfos.readLatestCommit(dir); - final LeafReader parLeafReader; - if (infos.size() == 1) { - parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT); - } else { - // This just means we didn't forceMerge above: - parLeafReader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir)); - } + assert infos.size() == 1; + final LeafReader 
parLeafReader = new SegmentReader(infos.info(0), IOContext.DEFAULT); //checkParallelReader(leaf, parLeafReader, schemaGen); @@ -682,9 +677,7 @@ protected void reindex(long oldSchemaGen, long newSchemaGen, LeafReader reader, w.addDocument(newDoc); } - if (random().nextBoolean()) { - w.forceMerge(1); - } + w.forceMerge(1); w.close(); } @@ -750,9 +743,7 @@ protected void reindex(long oldSchemaGen, long newSchemaGen, LeafReader reader, } } - if (random().nextBoolean()) { - w.forceMerge(1); - } + w.forceMerge(1); w.close(); } @@ -845,9 +836,7 @@ protected void reindex(long oldSchemaGen, long newSchemaGen, LeafReader reader, } } - if (random().nextBoolean()) { - w.forceMerge(1); - } + w.forceMerge(1); w.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java index 24b2c5095e57..8e62094c991a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java @@ -761,7 +761,7 @@ public void testUniqueTermCount() throws Exception { writer.commit(); DirectoryReader r = DirectoryReader.open(dir); - LeafReader r1 = getOnlySegmentReader(r); + LeafReader r1 = getOnlyLeafReader(r); assertEquals(26, r1.terms("field").size()); assertEquals(10, r1.terms("number").size()); writer.addDocument(doc); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java index ad4d60cad9dc..2266caf19f00 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDocValues.java @@ -41,7 +41,7 @@ public void testEmptyIndex() throws Exception { IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); iw.addDocument(new Document()); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = 
getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getBinary(r, "bogus")); @@ -66,7 +66,7 @@ public void testMisconfiguredField() throws Exception { doc.add(new StringField("foo", "bar", Field.Store.NO)); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = getOnlyLeafReader(dr); // errors expectThrows(IllegalStateException.class, () -> { @@ -103,7 +103,7 @@ public void testNumericField() throws Exception { doc.add(new NumericDocValuesField("foo", 3)); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getNumeric(r, "foo")); @@ -136,7 +136,7 @@ public void testBinaryField() throws Exception { doc.add(new BinaryDocValuesField("foo", new BytesRef("bar"))); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getBinary(r, "foo")); @@ -171,7 +171,7 @@ public void testSortedField() throws Exception { doc.add(new SortedDocValuesField("foo", new BytesRef("bar"))); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getBinary(r, "foo")); @@ -202,7 +202,7 @@ public void testSortedSetField() throws Exception { doc.add(new SortedSetDocValuesField("foo", new BytesRef("bar"))); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + LeafReader r = getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getSortedSet(r, "foo")); @@ -237,7 +237,7 @@ public void testSortedNumericField() throws Exception { doc.add(new SortedNumericDocValuesField("foo", 3)); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); - LeafReader r = getOnlySegmentReader(dr); + 
LeafReader r = getOnlyLeafReader(dr); // ok assertNotNull(DocValues.getSortedNumeric(r, "foo")); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java index 44b5b75bbe30..2e0cbd901f53 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java @@ -58,7 +58,7 @@ public void testAddIndexes() throws IOException { doc.add(newStringField("id", "1", Field.Store.YES)); doc.add(new NumericDocValuesField("dv", 1)); w.addDocument(doc); - IndexReader r1 = w.getReader(); + DirectoryReader r1 = w.getReader(); w.close(); Directory d2 = newDirectory(); @@ -67,12 +67,12 @@ public void testAddIndexes() throws IOException { doc.add(newStringField("id", "2", Field.Store.YES)); doc.add(new NumericDocValuesField("dv", 2)); w.addDocument(doc); - IndexReader r2 = w.getReader(); + DirectoryReader r2 = w.getReader(); w.close(); Directory d3 = newDirectory(); w = new RandomIndexWriter(random(), d3); - w.addIndexes(SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r1)), SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r2))); + w.addIndexes(SlowCodecReaderWrapper.wrap(getOnlyLeafReader(r1)), SlowCodecReaderWrapper.wrap(getOnlyLeafReader(r2))); r1.close(); d1.close(); r2.close(); @@ -81,7 +81,7 @@ public void testAddIndexes() throws IOException { w.forceMerge(1); DirectoryReader r3 = w.getReader(); w.close(); - LeafReader sr = getOnlySegmentReader(r3); + LeafReader sr = getOnlyLeafReader(r3); assertEquals(2, sr.numDocs()); NumericDocValues docValues = sr.getNumericDocValues("dv"); assertNotNull(docValues); @@ -109,7 +109,7 @@ public void testMultiValuedDocValuesField() throws Exception { DirectoryReader r = w.getReader(); w.close(); - assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0)); + assertEquals(17, 
DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0)); r.close(); d.close(); } @@ -130,7 +130,7 @@ public void testDifferentTypedDocValuesField() throws Exception { DirectoryReader r = w.getReader(); w.close(); - assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0)); + assertEquals(17, DocValues.getNumeric(getOnlyLeafReader(r), "field").get(0)); r.close(); d.close(); } @@ -150,7 +150,7 @@ public void testDifferentTypedDocValuesField2() throws Exception { }); DirectoryReader r = w.getReader(); - assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0)); + assertEquals(17, getOnlyLeafReader(r).getNumericDocValues("field").get(0)); r.close(); w.close(); d.close(); @@ -171,7 +171,7 @@ public void testLengthPrefixAcrossTwoPages() throws Exception { w.addDocument(doc); w.forceMerge(1); DirectoryReader r = w.getReader(); - BinaryDocValues s = DocValues.getSorted(getOnlySegmentReader(r), "field"); + BinaryDocValues s = DocValues.getSorted(getOnlyLeafReader(r), "field"); BytesRef bytes1 = s.get(0); assertEquals(bytes.length, bytes1.length); @@ -199,19 +199,18 @@ public void testDocValuesUnstored() throws IOException { writer.addDocument(doc); } DirectoryReader r = writer.getReader(); - LeafReader slow = SlowCompositeReaderWrapper.wrap(r); - FieldInfos fi = slow.getFieldInfos(); + FieldInfos fi = MultiFields.getMergedFieldInfos(r); FieldInfo dvInfo = fi.fieldInfo("dv"); assertTrue(dvInfo.getDocValuesType() != DocValuesType.NONE); - NumericDocValues dv = slow.getNumericDocValues("dv"); + NumericDocValues dv = MultiDocValues.getNumericValues(r, "dv"); for (int i = 0; i < 50; i++) { assertEquals(i, dv.get(i)); - Document d = slow.document(i); + Document d = r.document(i); // cannot use d.get("dv") due to another bug! 
assertNull(d.getField("dv")); assertEquals(Integer.toString(i), d.get("docId")); } - slow.close(); + r.close(); writer.close(); dir.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java b/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java index dc49db154e3d..2ef5824c5747 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDocsAndPositions.java @@ -335,7 +335,7 @@ public void testDocsEnumStart() throws Exception { doc.add(newStringField("foo", "bar", Field.Store.NO)); writer.addDocument(doc); DirectoryReader reader = writer.getReader(); - LeafReader r = getOnlySegmentReader(reader); + LeafReader r = getOnlyLeafReader(reader); PostingsEnum disi = TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, PostingsEnum.NONE); int docid = disi.docID(); assertEquals(-1, docid); @@ -360,7 +360,7 @@ public void testDocsAndPositionsEnumStart() throws Exception { doc.add(newTextField("foo", "bar", Field.Store.NO)); writer.addDocument(doc); DirectoryReader reader = writer.getReader(); - LeafReader r = getOnlySegmentReader(reader); + LeafReader r = getOnlyLeafReader(reader); PostingsEnum disi = r.postings(new Term("foo", "bar"), PostingsEnum.ALL); int docid = disi.docID(); assertEquals(-1, docid); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java index 489a185b326e..a814c4c351dc 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java @@ -284,7 +284,7 @@ public void testLUCENE_1590() throws Exception { TestUtil.checkIndex(dir); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir)); FieldInfos fi = reader.getFieldInfos(); // f1 assertFalse("f1 
should have no norms", fi.fieldInfo("f1").hasNorms()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java index b79e638ba078..62fe28aedb24 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java @@ -162,8 +162,8 @@ public void testCrazyReaderEquals() throws IOException { createRandomIndex(numdocs, leftWriter, seed); createRandomIndex(numdocs, rightWriter, seed); - leftReader = wrapReader(leftWriter.getReader(), false); - rightReader = wrapReader(rightWriter.getReader(), false); + leftReader = wrapReader(leftWriter.getReader()); + rightReader = wrapReader(rightWriter.getReader()); // check that our readers are valid TestUtil.checkReader(leftReader); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java b/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java index 36c9e0d80a1c..65cf84e6d9c9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestExitableDirectoryReader.java @@ -78,8 +78,8 @@ public BytesRef next() throws IOException { } } - public TestReader(IndexReader reader) throws IOException { - super(SlowCompositeReaderWrapper.wrap(reader)); + public TestReader(LeafReader reader) throws IOException { + super(reader); } @Override @@ -107,6 +107,7 @@ public void testExitableFilterIndexReader() throws Exception { Document d3 = new Document(); d3.add(newTextField("default", "ones two four", Field.Store.YES)); writer.addDocument(d3); + writer.forceMerge(1); writer.commit(); writer.close(); @@ -122,43 +123,39 @@ public void testExitableFilterIndexReader() throws Exception { // Not checking the validity of the result, all we are bothered about in this test is the timing out. 
directoryReader = DirectoryReader.open(directory); exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(1000)); - reader = new TestReader(exitableDirectoryReader); + reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader)); searcher = new IndexSearcher(reader); searcher.search(query, 10); reader.close(); - exitableDirectoryReader.close(); // Set a really low timeout value (1 millisecond) and expect an Exception directoryReader = DirectoryReader.open(directory); exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(1)); - reader = new TestReader(exitableDirectoryReader); + reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader)); IndexSearcher slowSearcher = new IndexSearcher(reader); expectThrows(ExitingReaderException.class, () -> { slowSearcher.search(query, 10); }); reader.close(); - exitableDirectoryReader.close(); // Set maximum time out and expect the query to complete. // Not checking the validity of the result, all we are bothered about in this test is the timing out. directoryReader = DirectoryReader.open(directory); exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(Long.MAX_VALUE)); - reader = new TestReader(exitableDirectoryReader); + reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader)); searcher = new IndexSearcher(reader); searcher.search(query, 10); reader.close(); - exitableDirectoryReader.close(); // Set a negative time allowed and expect the query to complete (should disable timeouts) // Not checking the validity of the result, all we are bothered about in this test is the timing out. 
directoryReader = DirectoryReader.open(directory); exitableDirectoryReader = new ExitableDirectoryReader(directoryReader, new QueryTimeoutImpl(-189034L)); - reader = new TestReader(exitableDirectoryReader); + reader = new TestReader(getOnlyLeafReader(exitableDirectoryReader)); searcher = new IndexSearcher(reader); searcher.search(query, 10); reader.close(); - exitableDirectoryReader.close(); directory.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java index cad47a4e1268..e9f6fe28854e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java @@ -98,8 +98,8 @@ public int nextDoc() throws IOException { } } - public TestReader(IndexReader reader) throws IOException { - super(SlowCompositeReaderWrapper.wrap(reader)); + public TestReader(LeafReader reader) throws IOException { + super(reader); } @Override @@ -128,7 +128,7 @@ public void testFilterIndexReader() throws Exception { Document d3 = new Document(); d3.add(newTextField("default", "two four", Field.Store.YES)); writer.addDocument(d3); - + writer.forceMerge(1); writer.close(); Directory target = newDirectory(); @@ -137,7 +137,7 @@ public void testFilterIndexReader() throws Exception { ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false); writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random()))); - try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) { + try (LeafReader reader = new TestReader(getOnlyLeafReader(DirectoryReader.open(directory)))) { writer.addIndexes(SlowCodecReaderWrapper.wrap(reader)); } writer.close(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFlex.java b/lucene/core/src/test/org/apache/lucene/index/TestFlex.java index 3d716e13fefa..d91301fa6acd 100644 --- 
a/lucene/core/src/test/org/apache/lucene/index/TestFlex.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestFlex.java @@ -70,7 +70,7 @@ public void testTermOrd() throws Exception { w.addDocument(doc); w.forceMerge(1); DirectoryReader r = w.getReader(); - TermsEnum terms = getOnlySegmentReader(r).fields().terms("f").iterator(); + TermsEnum terms = getOnlyLeafReader(r).fields().terms("f").iterator(); assertTrue(terms.next() != null); try { assertEquals(0, terms.ord()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java index 6b22fd711e20..91dcb6ea6766 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java @@ -37,14 +37,15 @@ public class TestIndexReaderClose extends LuceneTestCase { public void testCloseUnderException() throws IOException { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), new MockAnalyzer(random()))); + writer.addDocument(new Document()); writer.commit(); writer.close(); final int iters = 1000 + 1 + random().nextInt(20); for (int j = 0; j < iters; j++) { DirectoryReader open = DirectoryReader.open(dir); final boolean throwOnClose = !rarely(); - LeafReader wrap = SlowCompositeReaderWrapper.wrap(open); - FilterLeafReader reader = new FilterLeafReader(wrap) { + LeafReader leaf = getOnlyLeafReader(open); + FilterLeafReader reader = new FilterLeafReader(leaf) { @Override protected void doClose() throws IOException { super.doClose(); @@ -87,54 +88,10 @@ protected void doClose() throws IOException { reader.close(); // call it again } assertEquals(0, count.get()); - wrap.close(); } dir.close(); } - public void testCoreListenerOnSlowCompositeReaderWrapper() throws IOException { - RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory()); - final int numDocs = 
TestUtil.nextInt(random(), 1, 5); - for (int i = 0; i < numDocs; ++i) { - w.addDocument(new Document()); - if (random().nextBoolean()) { - w.commit(); - } - } - w.commit(); - w.close(); - - final IndexReader reader = DirectoryReader.open(w.w.getDirectory()); - final LeafReader leafReader = SlowCompositeReaderWrapper.wrap(reader); - - final int numListeners = TestUtil.nextInt(random(), 1, 10); - final List listeners = new ArrayList<>(); - AtomicInteger counter = new AtomicInteger(numListeners); - - for (int i = 0; i < numListeners; ++i) { - CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheKey()); - listeners.add(listener); - leafReader.addCoreClosedListener(listener); - } - for (int i = 0; i < 100; ++i) { - leafReader.addCoreClosedListener(listeners.get(random().nextInt(listeners.size()))); - } - final int removed = random().nextInt(numListeners); - Collections.shuffle(listeners, random()); - for (int i = 0; i < removed; ++i) { - leafReader.removeCoreClosedListener(listeners.get(i)); - } - assertEquals(numListeners, counter.get()); - // make sure listeners are registered on the wrapped reader and that closing any of them has the same effect - if (random().nextBoolean()) { - reader.close(); - } else { - leafReader.close(); - } - assertEquals(removed, counter.get()); - w.w.getDirectory().close(); - } - public void testCoreListenerOnWrapperWithDifferentCacheKey() throws IOException { RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory()); final int numDocs = TestUtil.nextInt(random(), 1, 5); @@ -144,13 +101,14 @@ public void testCoreListenerOnWrapperWithDifferentCacheKey() throws IOException w.commit(); } } + w.forceMerge(1); w.commit(); w.close(); final IndexReader reader = DirectoryReader.open(w.w.getDirectory()); // We explicitly define a different cache key final Object coreCacheKey = new Object(); - final LeafReader leafReader = new FilterLeafReader(SlowCompositeReaderWrapper.wrap(reader)) { + final LeafReader 
leafReader = new FilterLeafReader(getOnlyLeafReader(reader)) { @Override public Object getCoreCacheKey() { return coreCacheKey; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java index 7461618bc55a..2c3543e04386 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -692,7 +692,7 @@ public void testEmptyFieldNameTerms() throws IOException { writer.addDocument(doc); writer.close(); DirectoryReader reader = DirectoryReader.open(dir); - LeafReader subreader = getOnlySegmentReader(reader); + LeafReader subreader = getOnlyLeafReader(reader); TermsEnum te = subreader.fields().terms("").iterator(); assertEquals(new BytesRef("a"), te.next()); assertEquals(new BytesRef("b"), te.next()); @@ -713,7 +713,7 @@ public void testEmptyFieldNameWithEmptyTerm() throws IOException { writer.addDocument(doc); writer.close(); DirectoryReader reader = DirectoryReader.open(dir); - LeafReader subreader = getOnlySegmentReader(reader); + LeafReader subreader = getOnlyLeafReader(reader); TermsEnum te = subreader.fields().terms("").iterator(); assertEquals(new BytesRef(""), te.next()); assertEquals(new BytesRef("a"), te.next()); @@ -2549,7 +2549,7 @@ public void testEmptyNorm() throws Exception { w.commit(); w.close(); DirectoryReader r = DirectoryReader.open(d); - assertEquals(0, getOnlySegmentReader(r).getNormValues("foo").get(0)); + assertEquals(0, getOnlyLeafReader(r).getNormValues("foo").get(0)); r.close(); d.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java index a928fd2b1023..ff79e5e4113b 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java @@ -104,7 +104,7 @@ public 
TokenStreamComponents createComponents(String fieldName) { writer.forceMerge(1); writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(directory)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(directory)); this.searcher = newSearcher(reader); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java index 121e85c34755..5b70c38a7fa3 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiDocValues.java @@ -56,7 +56,7 @@ public void testNumerics() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); NumericDocValues multi = MultiDocValues.getNumericValues(ir, "numbers"); @@ -91,7 +91,7 @@ public void testBinary() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); BinaryDocValues multi = MultiDocValues.getBinaryValues(ir, "bytes"); @@ -131,7 +131,7 @@ public void testSorted() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes"); @@ -173,7 +173,7 @@ public void testSortedWithLotsOfDups() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes"); @@ -214,7 +214,7 
@@ public void testSortedSet() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); SortedSetDocValues multi = MultiDocValues.getSortedSetValues(ir, "bytes"); @@ -276,7 +276,7 @@ public void testSortedSetWithDups() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); SortedSetDocValues multi = MultiDocValues.getSortedSetValues(ir, "bytes"); @@ -337,7 +337,7 @@ public void testSortedNumeric() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); SortedNumericDocValues multi = MultiDocValues.getSortedNumericValues(ir, "nums"); @@ -388,7 +388,7 @@ public void testDocsWithField() throws Exception { DirectoryReader ir = iw.getReader(); iw.forceMerge(1); DirectoryReader ir2 = iw.getReader(); - LeafReader merged = getOnlySegmentReader(ir2); + LeafReader merged = getOnlyLeafReader(ir2); iw.close(); Bits multi = MultiDocValues.getDocsWithField(ir, "numbers"); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java index a82444bf9ea7..bc14cb8a690d 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java @@ -80,7 +80,7 @@ public void testSimpleSkip() throws IOException { writer.forceMerge(1); writer.close(); - LeafReader reader = getOnlySegmentReader(DirectoryReader.open(dir)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir)); for (int i = 0; i < 2; i++) { 
counter = 0; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java index 562cefb77740..45db69a5ef10 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestNorms.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestNorms.java @@ -111,8 +111,8 @@ public void testCustomEncoder() throws Exception { public void testMaxByteNorms() throws IOException { Directory dir = newFSDirectory(createTempDir("TestNorms.testMaxByteNorms")); buildIndex(dir); - LeafReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir)); - NumericDocValues normValues = open.getNormValues(byteTestField); + DirectoryReader open = DirectoryReader.open(dir); + NumericDocValues normValues = MultiDocValues.getNormValues(open, byteTestField); assertNotNull(normValues); for (int i = 0; i < open.maxDoc(); i++) { Document document = open.document(i); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java index 727f6ff7fcbe..15ecc0f8b008 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java @@ -238,16 +238,14 @@ public void testUpdatesAndDeletes() throws Exception { writer.close(); } - LeafReader slow = SlowCompositeReaderWrapper.wrap(reader); - - Bits liveDocs = slow.getLiveDocs(); + Bits liveDocs = MultiFields.getLiveDocs(reader); boolean[] expectedLiveDocs = new boolean[] { true, false, false, true, true, true }; for (int i = 0; i < expectedLiveDocs.length; i++) { assertEquals(expectedLiveDocs[i], liveDocs.get(i)); } long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17}; - NumericDocValues ndv = slow.getNumericDocValues("val"); + NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "val"); for (int i = 0; i < expectedValues.length; i++) { 
assertEquals(expectedValues[i], ndv.get(i)); } @@ -460,10 +458,9 @@ public DocValuesFormat getDocValuesFormatForField(String field) { final DirectoryReader reader = DirectoryReader.open(dir); - LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - NumericDocValues ndv = r.getNumericDocValues("ndv"); - SortedDocValues sdv = r.getSortedDocValues("sorted"); - for (int i = 0; i < r.maxDoc(); i++) { + NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv"); + SortedDocValues sdv = MultiDocValues.getSortedValues(reader, "sorted"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(17, ndv.get(i)); final BytesRef term = sdv.get(i); assertEquals(new BytesRef("value"), term); @@ -491,9 +488,8 @@ public void testUpdateSameDocMultipleTimes() throws Exception { writer.close(); final DirectoryReader reader = DirectoryReader.open(dir); - final LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - NumericDocValues ndv = r.getNumericDocValues("ndv"); - for (int i = 0; i < r.maxDoc(); i++) { + NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(3, ndv.get(i)); } reader.close(); @@ -592,9 +588,8 @@ public void testUpdateDocumentByMultipleTerms() throws Exception { writer.close(); final DirectoryReader reader = DirectoryReader.open(dir); - final LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - NumericDocValues ndv = r.getNumericDocValues("ndv"); - for (int i = 0; i < r.maxDoc(); i++) { + NumericDocValues ndv = MultiDocValues.getNumericValues(reader, "ndv"); + for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(3, ndv.get(i)); } reader.close(); @@ -806,7 +801,7 @@ public void testUpdateSegmentWithNoDocValues2() throws Exception { writer.close(); reader = DirectoryReader.open(dir); - LeafReader ar = getOnlySegmentReader(reader); + LeafReader ar = getOnlyLeafReader(reader); assertEquals(DocValuesType.NUMERIC, ar.getFieldInfos().fieldInfo("foo").getDocValuesType()); 
IndexSearcher searcher = new IndexSearcher(reader); TopFieldDocs td; @@ -1103,9 +1098,8 @@ public DocValuesFormat getDocValuesFormatForField(String field) { writer.close(); DirectoryReader reader = DirectoryReader.open(dir); - LeafReader r = SlowCompositeReaderWrapper.wrap(reader); - NumericDocValues f1 = r.getNumericDocValues("f1"); - NumericDocValues f2 = r.getNumericDocValues("f2"); + NumericDocValues f1 = MultiDocValues.getNumericValues(reader, "f1"); + NumericDocValues f2 = MultiDocValues.getNumericValues(reader, "f2"); assertEquals(12L, f1.get(0)); assertEquals(13L, f2.get(0)); assertEquals(17L, f1.get(1)); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java index 83dfd193b91f..bc3c3e83be30 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java @@ -66,7 +66,7 @@ public void testOmitNorms() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitsNorms()); assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms()); @@ -120,7 +120,7 @@ public void testMixedMerge() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitsNorms()); assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms()); @@ -168,7 +168,7 @@ public void testMixedRAM() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + 
LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertTrue("OmitNorms field bit should not be set.", !fi.fieldInfo("f1").omitsNorms()); assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitsNorms()); @@ -297,7 +297,7 @@ NumericDocValues getNorms(String field, Field f1, Field f2) throws IOException { // fully merge and validate MultiNorms against single segment. riw.forceMerge(1); DirectoryReader ir2 = riw.getReader(); - NumericDocValues norms2 = getOnlySegmentReader(ir2).getNormValues(field); + NumericDocValues norms2 = getOnlyLeafReader(ir2).getNormValues(field); if (norms1 == null) { assertNull(norms2); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java index 38c725118034..f5a74b56a50e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java @@ -153,7 +153,7 @@ public void testPositions() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); // docs + docs = docs assertEquals(IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java index 807c704ca0ff..3c12a0db4509 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java @@ -105,7 +105,7 @@ public void testOmitTermFreqAndPositions() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); 
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions()); assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions()); @@ -157,7 +157,7 @@ public void testMixedMerge() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions()); assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions()); @@ -200,7 +200,7 @@ public void testMixedRAM() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertEquals("OmitTermFreqAndPositions field bit should not be set.", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions()); assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java b/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java index e0fab18573b3..1f9ff118d689 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOrdinalMap.java @@ -83,13 +83,12 @@ public void testRamBytesUsed() throws IOException { } iw.commit(); DirectoryReader r = iw.getReader(); - LeafReader ar = SlowCompositeReaderWrapper.wrap(r); - SortedDocValues sdv = ar.getSortedDocValues("sdv"); + SortedDocValues sdv = MultiDocValues.getSortedValues(r, "sdv"); if (sdv instanceof MultiSortedDocValues) { OrdinalMap 
map = ((MultiSortedDocValues) sdv).mapping; assertEquals(RamUsageTester.sizeOf(map, ORDINAL_MAP_ACCUMULATOR), map.ramBytesUsed()); } - SortedSetDocValues ssdv = ar.getSortedSetDocValues("ssdv"); + SortedSetDocValues ssdv = MultiDocValues.getSortedSetValues(r, "ssdv"); if (ssdv instanceof MultiSortedSetDocValues) { OrdinalMap map = ((MultiSortedSetDocValues) ssdv).mapping; assertEquals(RamUsageTester.sizeOf(map, ORDINAL_MAP_ACCUMULATOR), map.ramBytesUsed()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java index 7078380c9381..166c9e4ded9b 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java @@ -18,6 +18,8 @@ import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; @@ -276,32 +278,6 @@ public void testIncompatibleIndexes2() throws IOException { dir2.close(); } - public void testIncompatibleIndexes3() throws IOException { - Directory dir1 = getDir1(random()); - Directory dir2 = getDir2(random()); - - CompositeReader ir1 = new MultiReader(DirectoryReader.open(dir1), SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1))), - ir2 = new MultiReader(DirectoryReader.open(dir2), DirectoryReader.open(dir2)); - CompositeReader[] readers = new CompositeReader[] {ir1, ir2}; - - expectThrows(IllegalArgumentException.class, () -> { - new ParallelCompositeReader(readers); - }); - - expectThrows(IllegalArgumentException.class, () -> { - new ParallelCompositeReader(random().nextBoolean(), readers, readers); - }); - - assertEquals(1, ir1.getRefCount()); - assertEquals(1, ir2.getRefCount()); - ir1.close(); - ir2.close(); - assertEquals(0, ir1.getRefCount()); - assertEquals(0, ir2.getRefCount()); - dir1.close(); - dir2.close(); - } - public void 
testIgnoreStoredFields() throws IOException { Directory dir1 = getDir1(random()); Directory dir2 = getDir2(random()); @@ -317,7 +293,7 @@ public void testIgnoreStoredFields() throws IOException { assertNull(pr.document(0).get("f3")); assertNull(pr.document(0).get("f4")); // check that fields are there - LeafReader slow = SlowCompositeReaderWrapper.wrap(pr); + Fields slow = MultiFields.getFields(pr); assertNotNull(slow.terms("f1")); assertNotNull(slow.terms("f2")); assertNotNull(slow.terms("f3")); @@ -333,7 +309,7 @@ public void testIgnoreStoredFields() throws IOException { assertNull(pr.document(0).get("f3")); assertNull(pr.document(0).get("f4")); // check that fields are there - slow = SlowCompositeReaderWrapper.wrap(pr); + slow = MultiFields.getFields(pr); assertNull(slow.terms("f1")); assertNull(slow.terms("f2")); assertNotNull(slow.terms("f3")); @@ -349,7 +325,7 @@ public void testIgnoreStoredFields() throws IOException { assertNull(pr.document(0).get("f3")); assertNull(pr.document(0).get("f4")); // check that fields are there - slow = SlowCompositeReaderWrapper.wrap(pr); + slow = MultiFields.getFields(pr); assertNull(slow.terms("f1")); assertNull(slow.terms("f2")); assertNotNull(slow.terms("f3")); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java index 31aa603b4741..f7f401fca9a1 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java @@ -63,8 +63,8 @@ public void testQueries() throws Exception { public void testFieldNames() throws Exception { Directory dir1 = getDir1(random()); Directory dir2 = getDir2(random()); - ParallelLeafReader pr = new ParallelLeafReader(SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)), - SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2))); + ParallelLeafReader pr = new 
ParallelLeafReader(getOnlyLeafReader(DirectoryReader.open(dir1)), + getOnlyLeafReader(DirectoryReader.open(dir2))); FieldInfos fieldInfos = pr.getFieldInfos(); assertEquals(4, fieldInfos.size()); assertNotNull(fieldInfos.fieldInfo("f1")); @@ -81,8 +81,8 @@ public void testRefCounts1() throws IOException { Directory dir2 = getDir2(random()); LeafReader ir1, ir2; // close subreaders, ParallelReader will not change refCounts, but close on its own close - ParallelLeafReader pr = new ParallelLeafReader(ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)), - ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2))); + ParallelLeafReader pr = new ParallelLeafReader(ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)), + ir2 = getOnlyLeafReader(DirectoryReader.open(dir2))); // check RefCounts assertEquals(1, ir1.getRefCount()); @@ -97,8 +97,8 @@ public void testRefCounts1() throws IOException { public void testRefCounts2() throws IOException { Directory dir1 = getDir1(random()); Directory dir2 = getDir2(random()); - LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)); - LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)); + LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)); + LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)); // don't close subreaders, so ParallelReader will increment refcounts ParallelLeafReader pr = new ParallelLeafReader(false, ir1, ir2); // check RefCounts @@ -117,7 +117,7 @@ public void testRefCounts2() throws IOException { public void testCloseInnerReader() throws Exception { Directory dir1 = getDir1(random()); - LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)); + LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)); // with overlapping ParallelLeafReader pr = new ParallelLeafReader(true, @@ -149,8 +149,8 @@ public void testIncompatibleIndexes() throws IOException { w2.addDocument(d3); w2.close(); - LeafReader ir1 = 
SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)); - LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)); + LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)); + LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)); // indexes don't have the same number of documents expectThrows(IllegalArgumentException.class, () -> { @@ -175,8 +175,8 @@ public void testIncompatibleIndexes() throws IOException { public void testIgnoreStoredFields() throws IOException { Directory dir1 = getDir1(random()); Directory dir2 = getDir2(random()); - LeafReader ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)); - LeafReader ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2)); + LeafReader ir1 = getOnlyLeafReader(DirectoryReader.open(dir1)); + LeafReader ir2 = getOnlyLeafReader(DirectoryReader.open(dir2)); // with overlapping ParallelLeafReader pr = new ParallelLeafReader(false, @@ -276,8 +276,8 @@ private IndexSearcher parallel(Random random) throws IOException { dir1 = getDir1(random); dir2 = getDir2(random); ParallelLeafReader pr = new ParallelLeafReader( - SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir1)), - SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir2))); + getOnlyLeafReader(DirectoryReader.open(dir1)), + getOnlyLeafReader(DirectoryReader.open(dir2))); TestUtil.checkReader(pr); return newSearcher(pr); } @@ -293,6 +293,7 @@ private Directory getDir1(Random random) throws IOException { d2.add(newTextField("f1", "v2", Field.Store.YES)); d2.add(newTextField("f2", "v2", Field.Store.YES)); w1.addDocument(d2); + w1.forceMerge(1); w1.close(); return dir1; } @@ -308,6 +309,7 @@ private Directory getDir2(Random random) throws IOException { d4.add(newTextField("f3", "v2", Field.Store.YES)); d4.add(newTextField("f4", "v2", Field.Store.YES)); w2.addDocument(d4); + w2.forceMerge(1); w2.close(); return dir2; } diff --git 
a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java index 373a125eeb6f..61c84dccab88 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java @@ -50,15 +50,7 @@ public void testEmptyIndex() throws IOException { IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random()))); - ParallelLeafReader apr = new ParallelLeafReader( - SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd1)), - SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd2))); - - // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum) - iwOut.addIndexes(SlowCodecReaderWrapper.wrap(apr)); - iwOut.forceMerge(1); - - // 2nd try with a readerless parallel reader + // add a readerless parallel reader iwOut.addIndexes(SlowCodecReaderWrapper.wrap(new ParallelLeafReader())); iwOut.forceMerge(1); @@ -136,16 +128,18 @@ public void testEmptyIndexWithVectors() throws IOException { Directory rdOut = newDirectory(); IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random()))); - final DirectoryReader reader1, reader2; - ParallelLeafReader pr = new ParallelLeafReader( - SlowCompositeReaderWrapper.wrap(reader1 = DirectoryReader.open(rd1)), - SlowCompositeReaderWrapper.wrap(reader2 = DirectoryReader.open(rd2))); + DirectoryReader reader1 = DirectoryReader.open(rd1); + DirectoryReader reader2 = DirectoryReader.open(rd2); + ParallelLeafReader pr = new ParallelLeafReader(false, + getOnlyLeafReader(reader1), + getOnlyLeafReader(reader2)); // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter) iwOut.addIndexes(SlowCodecReaderWrapper.wrap(pr)); - // ParallelReader closes any IndexReader you added to it: pr.close(); + reader1.close(); + 
reader2.close(); // assert subreaders were closed assertEquals(0, reader1.getRefCount()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java index c51fd2d7280c..a83c5492defc 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelTermEnum.java @@ -59,8 +59,8 @@ public void setUp() throws Exception { iw2.close(); - this.ir1 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd1)); - this.ir2 = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd2)); + this.ir1 = getOnlyLeafReader(DirectoryReader.open(rd1)); + this.ir2 = getOnlyLeafReader(DirectoryReader.open(rd2)); } @Override diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java index 3fa213109a67..eca293ed46f8 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java @@ -85,7 +85,7 @@ public void testPayloadFieldBit() throws Exception { // flush writer.close(); - SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(ram)); FieldInfos fi = reader.getFieldInfos(); assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").hasPayloads()); assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").hasPayloads()); @@ -112,7 +112,7 @@ public void testPayloadFieldBit() throws Exception { // flush writer.close(); - reader = getOnlySegmentReader(DirectoryReader.open(ram)); + reader = getOnlyLeafReader(DirectoryReader.open(ram)); fi = reader.getFieldInfos(); assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").hasPayloads()); assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").hasPayloads()); @@ -603,8 +603,9 @@ public void 
testMixupDocs() throws Exception { field.setTokenStream(ts); writer.addDocument(doc); DirectoryReader reader = writer.getReader(); - LeafReader sr = SlowCompositeReaderWrapper.wrap(reader); - PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.PAYLOADS); + TermsEnum te = MultiFields.getFields(reader).terms("field").iterator(); + assertTrue(te.seekExact(new BytesRef("withPayload"))); + PostingsEnum de = te.postings(null, PostingsEnum.PAYLOADS); de.nextDoc(); de.nextPosition(); assertEquals(new BytesRef("test"), de.getPayload()); @@ -637,7 +638,7 @@ public void testMixupMultiValued() throws Exception { doc.add(field3); writer.addDocument(doc); DirectoryReader reader = writer.getReader(); - SegmentReader sr = getOnlySegmentReader(reader); + LeafReader sr = getOnlyLeafReader(reader); PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.PAYLOADS); de.nextDoc(); de.nextPosition(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index 9b18f0298ee3..094623491152 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -182,7 +182,7 @@ public void testIllegalDimChangeViaAddIndexesCodecReader() throws Exception { w2.addDocument(doc); DirectoryReader r = DirectoryReader.open(dir); IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - w2.addIndexes(new CodecReader[] {getOnlySegmentReader(r)}); + w2.addIndexes(new CodecReader[] {(CodecReader) getOnlyLeafReader(r)}); }); assertEquals("cannot change point dimension count from 2 to 1 for field=\"dim\"", expected.getMessage()); @@ -331,7 +331,7 @@ public void testIllegalNumBytesChangeViaAddIndexesCodecReader() throws Exception w2.addDocument(doc); DirectoryReader r = DirectoryReader.open(dir); IllegalArgumentException expected = 
expectThrows(IllegalArgumentException.class, () -> { - w2.addIndexes(new CodecReader[] {getOnlySegmentReader(r)}); + w2.addIndexes(new CodecReader[] {(CodecReader) getOnlyLeafReader(r)}); }); assertEquals("cannot change point numBytes from 6 to 4 for field=\"dim\"", expected.getMessage()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java b/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java index aff0bd9f06ff..b21cb2384f0f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java @@ -375,10 +375,9 @@ public void testWithUnindexedFields() throws Exception { riw.addDocument(doc); } CompositeReader ir = riw.getReader(); - LeafReader slow = SlowCompositeReaderWrapper.wrap(ir); - FieldInfos fis = slow.getFieldInfos(); + FieldInfos fis = MultiFields.getMergedFieldInfos(ir); assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fis.fieldInfo("foo").getIndexOptions()); - slow.close(); + ir.close(); ir.close(); riw.close(); dir.close(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java b/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java index 6a1ab3a4018f..401254b8d66b 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestReaderClosed.java @@ -32,7 +32,7 @@ import org.apache.lucene.util.TestUtil; public class TestReaderClosed extends LuceneTestCase { - private IndexReader reader; + private DirectoryReader reader; private Directory dir; @Override @@ -54,6 +54,7 @@ public void setUp() throws Exception { field.setStringValue(TestUtil.randomUnicodeString(random(), 10)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); } @@ -77,8 +78,7 @@ public void test() throws Exception { // LUCENE-3800 public void testReaderChaining() throws Exception { 
assertTrue(reader.getRefCount() > 0); - IndexReader wrappedReader = SlowCompositeReaderWrapper.wrap(reader); - wrappedReader = new ParallelLeafReader((LeafReader) wrappedReader); + LeafReader wrappedReader = new ParallelLeafReader(getOnlyLeafReader(reader)); IndexSearcher searcher = newSearcher(wrappedReader); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java b/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java index 308c48ee77d9..88b177d68dec 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestReaderWrapperDVTypeCheck.java @@ -45,7 +45,7 @@ public void testNoDVFieldOnSegment() throws IOException{ { final Random indexRandom = new Random(seed); final int docs; - docs = indexRandom.nextInt(4); + docs = TestUtil.nextInt(indexRandom, 1, 4); // System.out.println("docs:"+docs); for(int i=0; i< docs; i++){ @@ -68,12 +68,13 @@ public void testNoDVFieldOnSegment() throws IOException{ iw.commit(); } } + iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); // System.out.println("sdv:"+ sdvExist+ " ssdv:"+ssdvExist+", segs: "+reader.leaves().size() +", "+reader.leaves()); iw.close(); - final LeafReader wrapper = SlowCompositeReaderWrapper.wrap(reader); + final LeafReader wrapper = getOnlyLeafReader(reader); { //final Random indexRandom = new Random(seed); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java index fc708c2e7862..1e85e14c9de2 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermEnum.java @@ -79,7 +79,7 @@ public void testPrevTermAtEnd() throws IOException .setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat()))); addDoc(writer, "aaa bbb"); writer.close(); - SegmentReader 
reader = getOnlySegmentReader(DirectoryReader.open(dir)); + LeafReader reader = getOnlyLeafReader(DirectoryReader.open(dir)); TermsEnum terms = reader.fields().terms("content").iterator(); assertNotNull(terms.next()); assertEquals("aaa", terms.term().utf8ToString()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java b/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java index d423616c0980..606a11aff65a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestStressAdvance.java @@ -74,7 +74,7 @@ public void testStressAdvance() throws Exception { bDocIDs.add(docID); } } - final TermsEnum te = getOnlySegmentReader(r).fields().terms("field").iterator(); + final TermsEnum te = getOnlyLeafReader(r).fields().terms("field").iterator(); PostingsEnum de = null; for(int iter2=0;iter2<10;iter2++) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java index aa2ca247fd87..b074f815f611 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermsEnum.java @@ -732,7 +732,7 @@ public void testIntersectBasic() throws Exception { w.forceMerge(1); DirectoryReader r = w.getReader(); w.close(); - LeafReader sub = getOnlySegmentReader(r); + LeafReader sub = getOnlyLeafReader(r); Terms terms = sub.fields().terms("field"); Automaton automaton = new RegExp(".*", RegExp.NONE).toAutomaton(); CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false); @@ -786,7 +786,7 @@ public void testIntersectStartTerm() throws Exception { w.forceMerge(1); DirectoryReader r = w.getReader(); w.close(); - LeafReader sub = getOnlySegmentReader(r); + LeafReader sub = getOnlyLeafReader(r); Terms terms = sub.fields().terms("field"); Automaton automaton = new RegExp(".*d", RegExp.NONE).toAutomaton(); @@ -840,7 +840,7 
@@ public void testIntersectEmptyString() throws Exception { w.forceMerge(1); DirectoryReader r = w.getReader(); w.close(); - LeafReader sub = getOnlySegmentReader(r); + LeafReader sub = getOnlyLeafReader(r); Terms terms = sub.fields().terms("field"); Automaton automaton = new RegExp(".*", RegExp.NONE).toAutomaton(); // accept ALL diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java index 7a9bb4e6e19d..79c32d38bc93 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; @@ -157,9 +156,10 @@ public void setUp() throws Exception { writer.addDocument(d4); } - r = SlowCompositeReaderWrapper.wrap(writer.getReader()); + writer.forceMerge(1); + r = getOnlyLeafReader(writer.getReader()); writer.close(); - s = newSearcher(r); + s = new IndexSearcher(r); s.setSimilarity(sim); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java index f28997562ea9..cc6606f038f6 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java @@ -87,7 +87,7 @@ public static void beforeClass() throws Exception { iw.forceMerge(1); iw.close(); r = DirectoryReader.open(dir); - reader = getOnlySegmentReader(r); + reader = getOnlyLeafReader(r); searcher = new IndexSearcher(reader); 
searcher.setSimilarity(new ClassicSimilarity() { @Override diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java index aa0c86dde9ea..5d0096dd5bc5 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseEnum.java @@ -48,8 +48,8 @@ public void testOneDocument() throws IOException { DirectoryReader ir = DirectoryReader.open(writer); writer.close(); - PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS); - PostingsEnum p2 = getOnlySegmentReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS); + PostingsEnum p1 = getOnlyLeafReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS); + PostingsEnum p2 = getOnlyLeafReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS); PostingsEnum union = new MultiPhraseQuery.UnionPostingsEnum(Arrays.asList(p1, p2)); assertEquals(-1, union.docID()); @@ -90,8 +90,8 @@ public void testSomeDocuments() throws IOException { DirectoryReader ir = DirectoryReader.open(writer); writer.close(); - PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS); - PostingsEnum p2 = getOnlySegmentReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS); + PostingsEnum p1 = getOnlyLeafReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS); + PostingsEnum p2 = getOnlyLeafReader(ir).postings(new Term("field", "bar"), PostingsEnum.POSITIONS); PostingsEnum union = new MultiPhraseQuery.UnionPostingsEnum(Arrays.asList(p1, p2)); assertEquals(-1, union.docID()); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java index cf779034d811..0d6cb36f0ac3 100644 --- 
a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java @@ -95,7 +95,7 @@ public int getPositionIncrementGap(String fieldName) { reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = new IndexSearcher(reader); } @Override @@ -123,7 +123,7 @@ public void testBarelyCloseEnough() throws Exception { query = new PhraseQuery(3, "field", "one", "five"); ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); - QueryUtils.check(random(), query,searcher); + QueryUtils.check(random(), query, searcher); } /** diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java index b7ae42a54bca..227b15d91d4a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java @@ -36,9 +36,7 @@ import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; -import org.apache.lucene.search.spans.MultiSpansWrapper; import org.apache.lucene.search.spans.SpanCollector; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanQuery; @@ -225,7 +223,7 @@ public void testPayloadsPos0() throws Exception { writer.addDocument(doc); final IndexReader readerFromWriter = writer.getReader(); - LeafReader r = SlowCompositeReaderWrapper.wrap(readerFromWriter); + LeafReader r = getOnlyLeafReader(readerFromWriter); PostingsEnum tp = r.postings(new Term("content", "a"), PostingsEnum.ALL); @@ -241,7 +239,7 @@ public void testPayloadsPos0() throws Exception { // only one doc has "a" assertEquals(DocIdSetIterator.NO_MORE_DOCS, tp.nextDoc()); - 
IndexSearcher is = newSearcher(readerFromWriter); + IndexSearcher is = newSearcher(getOnlyLeafReader(readerFromWriter)); SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a")); SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k")); @@ -254,7 +252,7 @@ public void testPayloadsPos0() throws Exception { System.out.println("\ngetPayloadSpans test"); } PayloadSpanCollector collector = new PayloadSpanCollector(); - Spans pspans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS); + Spans pspans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); while (pspans.nextDoc() != Spans.NO_MORE_DOCS) { while (pspans.nextStartPosition() != Spans.NO_MORE_POSITIONS) { if (VERBOSE) { @@ -276,7 +274,7 @@ public void testPayloadsPos0() throws Exception { assertEquals(8, count); // System.out.println("\ngetSpans test"); - Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq); + Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); count = 0; sawZero = false; while (spans.nextDoc() != Spans.NO_MORE_DOCS) { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java index 9278934a4929..c332c10406c1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarityProvider.java @@ -20,13 +20,13 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MultiDocValues; import 
org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; @@ -75,10 +75,9 @@ public void tearDown() throws Exception { public void testBasics() throws Exception { // sanity check of norms writer // TODO: generalize - LeafReader slow = SlowCompositeReaderWrapper.wrap(reader); - NumericDocValues fooNorms = slow.getNormValues("foo"); - NumericDocValues barNorms = slow.getNormValues("bar"); - for (int i = 0; i < slow.maxDoc(); i++) { + NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo"); + NumericDocValues barNorms = MultiDocValues.getNormValues(reader, "bar"); + for (int i = 0; i < reader.maxDoc(); i++) { assertFalse(fooNorms.get(i) == barNorms.get(i)); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java index 30061b5ca1f1..2cdcba4db2c6 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java @@ -29,7 +29,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.store.Directory; @@ -55,11 +54,11 @@ public void setUp() throws Exception { .setSimilarity(new ClassicSimilarity())); for (int i = 0; i < values.length; i++) { Document doc = new Document(); - doc - .add(newTextField(FIELD, values[i], Field.Store.YES)); + doc.add(newTextField(FIELD, values[i], Field.Store.YES)); writer.addDocument(doc); } - indexReader = 
SlowCompositeReaderWrapper.wrap(writer.getReader()); + writer.forceMerge(1); + indexReader = getOnlyLeafReader(writer.getReader()); writer.close(); indexSearcher = newSearcher(indexReader); indexSearcher.setSimilarity(new ClassicSimilarity()); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java index 9c7ada8c68f7..c656b8559305 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java @@ -16,10 +16,15 @@ */ package org.apache.lucene.search; +import org.apache.lucene.document.Document; import org.apache.lucene.document.IntPoint; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; +import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase { @@ -36,7 +41,15 @@ public void testNeverCacheMatchAll() throws Exception { for (int i = 0; i < 1000; ++i) { policy.onUse(q); } - assertFalse(policy.shouldCache(q, SlowCompositeReaderWrapper.wrap(new MultiReader()).getContext())); + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + w.addDocument(new Document()); + IndexReader r = DirectoryReader.open(w); + assertFalse(policy.shouldCache(q, getOnlyLeafReader(r).getContext())); + + r.close(); + w.close(); + dir.close(); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java 
b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java index 052457b63ce7..b4435e7c1407 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java @@ -116,8 +116,9 @@ public static void beforeClass() throws Exception { field("first", "bubba"), field("last", "jones") })); reader = writer.getReader(); + writer.forceMerge(1); writer.close(); - searcher = newSearcher(reader); + searcher = new IndexSearcher(getOnlyLeafReader(reader)); } @AfterClass @@ -251,7 +252,7 @@ public void testSpans0() throws Exception { SpanQuery q = new SpanOrQuery(q1, new FieldMaskingSpanQuery(q2, "gender")); check(q, new int[] { 0, 1, 2, 3, 4 }); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(span, 0,0,1); assertNext(span, 1,0,1); assertNext(span, 1,1,2); @@ -273,8 +274,8 @@ public void testSpans1() throws Exception { check(qA, new int[] { 0, 1, 2, 4 }); check(qB, new int[] { 0, 1, 2, 4 }); - Spans spanA = MultiSpansWrapper.wrap(searcher.getIndexReader(), qA); - Spans spanB = MultiSpansWrapper.wrap(searcher.getIndexReader(), qB); + Spans spanA = qA.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); + Spans spanB = qB.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); while (spanA.nextDoc() != Spans.NO_MORE_DOCS) { assertNotSame("spanB not still going", Spans.NO_MORE_DOCS, spanB.nextDoc()); @@ -299,7 +300,7 @@ public void testSpans2() throws Exception { new FieldMaskingSpanQuery(qB, "id") }, -1, false ); check(q, new int[] { 0, 1, 2, 3 }); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, 
false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(span, 0,0,1); assertNext(span, 1,1,2); assertNext(span, 2,0,1); diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java index b101c6121624..f297f33676d1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java @@ -60,9 +60,10 @@ public void setUp() throws Exception { doc.add(newTextField(FIELD, docFields[i], Field.Store.NO)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = newSearcher(getOnlyLeafReader(reader)); } protected String[] docFields = { @@ -118,7 +119,7 @@ public String s(int doc, int start, int end) { public void testNearSpansNext() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(span,0,0,3); assertNext(span,1,0,4); assertFinished(span); @@ -131,7 +132,7 @@ public void testNearSpansNext() throws Exception { */ public void testNearSpansAdvanceLikeNext() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(0, span.advance(0)); assertEquals(0, span.nextStartPosition()); assertEquals(s(0,0,3), s(span)); @@ -143,7 +144,7 @@ public void testNearSpansAdvanceLikeNext() throws Exception { public void testNearSpansNextThenAdvance() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = 
MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc()); assertEquals(0, span.nextStartPosition()); assertEquals(s(0,0,3), s(span)); @@ -155,7 +156,7 @@ public void testNearSpansNextThenAdvance() throws Exception { public void testNearSpansNextThenAdvancePast() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc()); assertEquals(0, span.nextStartPosition()); assertEquals(s(0,0,3), s(span)); @@ -164,13 +165,13 @@ public void testNearSpansNextThenAdvancePast() throws Exception { public void testNearSpansAdvancePast() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(Spans.NO_MORE_DOCS, span.advance(2)); } public void testNearSpansAdvanceTo0() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(0, span.advance(0)); assertEquals(0, span.nextStartPosition()); assertEquals(s(0,0,3), s(span)); @@ -178,7 +179,7 @@ public void testNearSpansAdvanceTo0() throws Exception { public void testNearSpansAdvanceTo1() throws Exception { SpanNearQuery q = makeQuery(); - Spans span = MultiSpansWrapper.wrap(searcher.getIndexReader(), q); + Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), 
SpanWeight.Postings.POSITIONS); assertEquals(1, span.advance(1)); assertEquals(0, span.nextStartPosition()); assertEquals(s(1,0,4), s(span)); @@ -217,7 +218,7 @@ public void testOrderedSpanIteration() throws Exception { new SpanOrQuery(new SpanTermQuery(new Term(FIELD, "w1")), new SpanTermQuery(new Term(FIELD, "w2"))), new SpanTermQuery(new Term(FIELD, "w4")) }, 10, true); - Spans spans = MultiSpansWrapper.wrap(reader, q); + Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans,0,0,4); assertNext(spans,0,1,4); assertFinished(spans); @@ -227,7 +228,7 @@ public void testOrderedSpanIterationSameTerms1() throws Exception { SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{ new SpanTermQuery(new Term(FIELD, "t1")), new SpanTermQuery(new Term(FIELD, "t2")) }, 1, true); - Spans spans = MultiSpansWrapper.wrap(reader, q); + Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans,4,0,2); assertFinished(spans); } @@ -236,7 +237,7 @@ public void testOrderedSpanIterationSameTerms2() throws Exception { SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{ new SpanTermQuery(new Term(FIELD, "t2")), new SpanTermQuery(new Term(FIELD, "t1")) }, 1, true); - Spans spans = MultiSpansWrapper.wrap(reader, q); + Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans,4,1,4); assertNext(spans,4,2,4); assertFinished(spans); @@ -260,7 +261,7 @@ public void testGaps() throws Exception { .addGap(1) .addClause(new SpanTermQuery(new Term(FIELD, "w2"))) .build(); - Spans spans = MultiSpansWrapper.wrap(reader, q); + Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans, 1, 0, 3); assertNext(spans, 2, 0, 3); assertFinished(spans); @@ 
-273,7 +274,7 @@ public void testGaps() throws Exception { .addClause(new SpanTermQuery(new Term(FIELD, "w3"))) .setSlop(1) .build(); - spans = MultiSpansWrapper.wrap(reader, q); + spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans, 2, 0, 5); assertNext(spans, 3, 0, 6); assertFinished(spans); @@ -285,7 +286,7 @@ public void testMultipleGaps() throws Exception { .addGap(2) .addClause(new SpanTermQuery(new Term(FIELD, "g"))) .build(); - Spans spans = MultiSpansWrapper.wrap(reader, q); + Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans, 5, 0, 4); assertNext(spans, 5, 9, 13); assertFinished(spans); diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java index 03fc40dda39a..dfc0439e32ca 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanCollection.java @@ -67,9 +67,10 @@ public void setUp() throws Exception { doc.add(newField(FIELD, docFields[i], OFFSETS)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = newSearcher(getOnlyLeafReader(reader)); } private static class TermCollector implements SpanCollector { @@ -119,7 +120,7 @@ public void testNestedNearQuery() throws IOException { SpanNearQuery q7 = new SpanNearQuery(new SpanQuery[]{q1, q6}, 1, true); TermCollector collector = new TermCollector(); - Spans spans = MultiSpansWrapper.wrap(reader, q7, SpanWeight.Postings.POSITIONS); + Spans spans = q7.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(0, spans.advance(0)); spans.nextStartPosition(); 
checkCollectedTerms(spans, collector, new Term(FIELD, "w1"), new Term(FIELD, "w2"), new Term(FIELD, "w3")); @@ -139,7 +140,7 @@ public void testOrQuery() throws IOException { SpanOrQuery orQuery = new SpanOrQuery(q2, q3); TermCollector collector = new TermCollector(); - Spans spans = MultiSpansWrapper.wrap(reader, orQuery, SpanWeight.Postings.POSITIONS); + Spans spans = orQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(1, spans.advance(1)); spans.nextStartPosition(); @@ -169,7 +170,7 @@ public void testSpanNotQuery() throws IOException { SpanNotQuery notq = new SpanNotQuery(nq, q3); TermCollector collector = new TermCollector(); - Spans spans = MultiSpansWrapper.wrap(reader, notq, SpanWeight.Postings.POSITIONS); + Spans spans = notq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals(2, spans.advance(2)); spans.nextStartPosition(); diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java index c26070416c26..3e50183e739a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpanContainQuery.java @@ -47,9 +47,10 @@ public void setUp() throws Exception { doc.add(newTextField(field, docFields[i], Field.Store.YES)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = newSearcher(getOnlyLeafReader(reader)); } @Override @@ -71,7 +72,7 @@ void checkHits(Query query, int[] results) throws Exception { } Spans makeSpans(SpanQuery sq) throws Exception { - return MultiSpansWrapper.wrap(searcher.getIndexReader(), sq); + return sq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), 
SpanWeight.Postings.POSITIONS); } void tstEqualSpans(String mes, SpanQuery expectedQ, SpanQuery actualQ) throws Exception { @@ -144,4 +145,4 @@ public void testSpanContainPhraseSecondWord() throws Exception { assertFinished(spans); } -} \ No newline at end of file +} diff --git a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java index 9352f60b7740..29833012a456 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpans.java @@ -70,9 +70,10 @@ public void setUp() throws Exception { doc.add(newTextField(field, docFields[i], Field.Store.YES)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = newSearcher(getOnlyLeafReader(reader)); } @Override @@ -201,7 +202,7 @@ public void testSpanNearOrderedEqual15() throws Exception { public void testSpanNearOrderedOverlap() throws Exception { final SpanQuery query = spanNearOrderedQuery(field, 1, "t1", "t2", "t3"); - Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query); + Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertEquals("first doc", 11, spans.nextDoc()); assertEquals("first start", 0, spans.nextStartPosition()); @@ -216,7 +217,7 @@ public void testSpanNearOrderedOverlap() throws Exception { public void testSpanNearUnOrdered() throws Exception { //See http://www.gossamer-threads.com/lists/lucene/java-dev/52270 for discussion about this test SpanQuery senq = spanNearUnorderedQuery(field, 0, "u1", "u2"); - Spans spans = MultiSpansWrapper.wrap(reader, senq); + Spans spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans, 4, 1, 3); assertNext(spans, 5, 2, 4); 
assertNext(spans, 8, 2, 4); @@ -225,7 +226,7 @@ public void testSpanNearUnOrdered() throws Exception { assertFinished(spans); senq = spanNearUnorderedQuery(1, senq, spanTermQuery(field, "u2")); - spans = MultiSpansWrapper.wrap(reader, senq); + spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertNext(spans, 4, 0, 3); assertNext(spans, 4, 1, 3); // unordered spans can be subsets assertNext(spans, 5, 0, 4); @@ -239,7 +240,7 @@ public void testSpanNearUnOrdered() throws Exception { } private Spans orSpans(String[] terms) throws Exception { - return MultiSpansWrapper.wrap(searcher.getIndexReader(), spanOrQuery(field, terms)); + return spanOrQuery(field, terms).createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); } public void testSpanOrEmpty() throws Exception { @@ -443,7 +444,7 @@ private int spanCount(String include, String exclude, int pre, int post) throws SpanQuery iq = spanTermQuery(field, include); SpanQuery eq = spanTermQuery(field, exclude); SpanQuery snq = spanNotQuery(iq, eq, pre, post); - Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), snq); + Spans spans = snq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); int i = 0; if (spans != null) { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java index 5d374f782c7a..e052541b3604 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/sortedset/DefaultSortedSetDocValuesReaderState.java @@ -23,9 +23,16 @@ import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState.OrdRange; 
-import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues; +import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues; +import org.apache.lucene.index.MultiDocValues.OrdinalMap; +import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; @@ -35,12 +42,13 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesReaderState { private final String field; - private final LeafReader topReader; private final int valueCount; /** {@link IndexReader} passed to the constructor. */ public final IndexReader origReader; + private final Map cachedOrdMaps = new HashMap<>(); + private final Map prefixToOrdRange = new HashMap<>(); /** Creates this, pulling doc values from the default {@link @@ -57,8 +65,7 @@ public DefaultSortedSetDocValuesReaderState(IndexReader reader, String field) th // We need this to create thread-safe MultiSortedSetDV // per collector: - topReader = SlowCompositeReaderWrapper.wrap(reader); - SortedSetDocValues dv = topReader.getSortedSetDocValues(field); + SortedSetDocValues dv = getDocValues(); if (dv == null) { throw new IllegalArgumentException("field \"" + field + "\" was not indexed with SortedSetDocValues"); } @@ -100,7 +107,43 @@ public DefaultSortedSetDocValuesReaderState(IndexReader reader, String field) th /** Return top-level doc values. */ @Override public SortedSetDocValues getDocValues() throws IOException { - return topReader.getSortedSetDocValues(field); + // TODO: this is dup'd from slow composite reader wrapper ... can we factor it out to share? 
+ OrdinalMap map = null; + synchronized (cachedOrdMaps) { + map = cachedOrdMaps.get(field); + if (map == null) { + // uncached, or not a multi dv + SortedSetDocValues dv = MultiDocValues.getSortedSetValues(origReader, field); + if (dv instanceof MultiSortedSetDocValues) { + map = ((MultiSortedSetDocValues)dv).mapping; + if (map.owner == origReader.getCoreCacheKey()) { + cachedOrdMaps.put(field, map); + } + } + return dv; + } + } + + assert map != null; + int size = origReader.leaves().size(); + final SortedSetDocValues[] values = new SortedSetDocValues[size]; + final int[] starts = new int[size+1]; + for (int i = 0; i < size; i++) { + LeafReaderContext context = origReader.leaves().get(i); + final LeafReader reader = context.reader(); + final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); + if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED_SET) { + return null; + } + SortedSetDocValues v = reader.getSortedSetDocValues(field); + if (v == null) { + v = DocValues.emptySortedSet(); + } + values[i] = v; + starts[i] = context.docBase; + } + starts[size] = origReader.maxDoc(); + return new MultiSortedSetDocValues(values, starts, map); } /** Returns mapping from prefix to {@link OrdRange}. 
*/ diff --git a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java index 7659be80ec8f..60beddd88d0b 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java @@ -32,7 +32,6 @@ import org.apache.lucene.facet.LabelAndValue; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; @@ -227,41 +226,6 @@ public void testSomeSegmentsMissing() throws Exception { dir.close(); } - public void testSlowCompositeReaderWrapper() throws Exception { - Directory dir = newDirectory(); - - RandomIndexWriter writer = new RandomIndexWriter(random(), dir); - - FacetsConfig config = new FacetsConfig(); - - Document doc = new Document(); - doc.add(new SortedSetDocValuesFacetField("a", "foo1")); - writer.addDocument(config.build(doc)); - - writer.commit(); - - doc = new Document(); - doc.add(new SortedSetDocValuesFacetField("a", "foo2")); - writer.addDocument(config.build(doc)); - - // NRT open - IndexSearcher searcher = new IndexSearcher(SlowCompositeReaderWrapper.wrap(writer.getReader())); - - // Per-top-reader state: - SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader()); - - FacetsCollector c = new FacetsCollector(); - searcher.search(new MatchAllDocsQuery(), c); - Facets facets = new SortedSetDocValuesFacetCounts(state, c); - - // Ask for top 10 labels for any dims that have counts: - assertEquals("dim=a path=[] value=2 childCount=2\n foo1 (1)\n foo2 (1)\n", facets.getTopChildren(10, "a").toString()); - - writer.close(); - 
IOUtils.close(searcher.getIndexReader(), dir); - } - - public void testRandom() throws Exception { String[] tokens = getRandomTokens(10); Directory indexDir = newDirectory(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java index a3bda0b3a6f1..2f51c9433c50 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java @@ -44,7 +44,6 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.ReaderUtil; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource; @@ -1164,16 +1163,7 @@ private TopGroups searchShards(IndexSearcher topSearcher, ShardSearche final List>> shardGroups = new ArrayList<>(); List> firstPassGroupingCollectors = new ArrayList<>(); AbstractFirstPassGroupingCollector firstPassCollector = null; - boolean shardsCanUseIDV; - if (canUseIDV) { - if (SlowCompositeReaderWrapper.class.isAssignableFrom(subSearchers[0].getIndexReader().getClass())) { - shardsCanUseIDV = false; - } else { - shardsCanUseIDV = !preFlex; - } - } else { - shardsCanUseIDV = false; - } + boolean shardsCanUseIDV = canUseIDV; String groupField = "group"; diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java index af507cdc2fb6..2796e015b517 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java @@ -58,7 +58,6 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; import 
org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Term; @@ -558,7 +557,7 @@ public String toString(String field) { assertEquals(numParents, topDocs.totalHits); for (int i = 0; i < topDocs.scoreDocs.length; i++) { ScoreDoc scoreDoc = topDocs.scoreDocs[i]; - String id = SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()).document(scoreDoc.doc).get("id"); + String id = searcher.doc(scoreDoc.doc).get("id"); assertEquals(lowestScoresPerParent.get(id), scoreDoc.score, 0f); } @@ -567,7 +566,7 @@ public String toString(String field) { assertEquals(numParents, topDocs.totalHits); for (int i = 0; i < topDocs.scoreDocs.length; i++) { ScoreDoc scoreDoc = topDocs.scoreDocs[i]; - String id = SlowCompositeReaderWrapper.wrap(searcher.getIndexReader()).document(scoreDoc.doc).get("id"); + String id = searcher.doc(scoreDoc.doc).get("id"); assertEquals(highestScoresPerParent.get(id), scoreDoc.score, 0f); } @@ -1229,8 +1228,7 @@ public boolean needsScores() { final Map docToJoinScore = new HashMap<>(); if (multipleValuesPerDocument) { - LeafReader slowCompositeReader = SlowCompositeReaderWrapper.wrap(topLevelReader); - Terms terms = slowCompositeReader.terms(toField); + Terms terms = MultiFields.getTerms(topLevelReader, toField); if (terms != null) { PostingsEnum postingsEnum = null; SortedSet joinValues = new TreeSet<>(); diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java index e29e8c805e59..57e25fe0b0b6 100644 --- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java +++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java @@ -41,7 +41,6 @@ import org.apache.lucene.document.TextField; 
import org.apache.lucene.index.CompositeReader; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; @@ -49,8 +48,10 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; @@ -66,8 +67,8 @@ import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; -import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.ByteBlockPool.Allocator; +import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LineFileDocs; @@ -170,15 +171,14 @@ public void assertAgainstRAMDirectory(MemoryIndex memory) throws Exception { private void duellReaders(CompositeReader other, LeafReader memIndexReader) throws IOException { - LeafReader competitor = SlowCompositeReaderWrapper.wrap(other); Fields memFields = memIndexReader.fields(); - for (String field : competitor.fields()) { + for (String field : MultiFields.getFields(other)) { Terms memTerms = memFields.terms(field); Terms iwTerms = memIndexReader.terms(field); if (iwTerms == null) { assertNull(memTerms); } else { - NumericDocValues normValues = competitor.getNormValues(field); + NumericDocValues normValues = MultiDocValues.getNormValues(other, field); NumericDocValues memNormValues = memIndexReader.getNormValues(field); if (normValues != null) { 
// mem idx always computes norms on the fly diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java b/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java similarity index 98% rename from lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java rename to lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java index e44c53cb54ea..de79ab07f764 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java +++ b/lucene/misc/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java @@ -41,6 +41,7 @@ * leaves and then operate per-LeafReader, * instead of using this class. */ + public final class SlowCompositeReaderWrapper extends LeafReader { private final CompositeReader in; @@ -63,6 +64,9 @@ public static LeafReader wrap(IndexReader reader) throws IOException { SlowCompositeReaderWrapper(CompositeReader reader, boolean merging) throws IOException { super(); in = reader; + if (getFieldInfos().hasPointValues()) { + throw new IllegalArgumentException("cannot wrap points"); + } fields = MultiFields.getFields(in); in.registerParentReader(this); this.merging = merging; diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java b/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java new file mode 100644 index 000000000000..d37e45abebfb --- /dev/null +++ b/lucene/misc/src/test/org/apache/lucene/index/TestSlowCompositeReaderWrapper.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.index; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.lucene.document.Document; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestUtil; + +public class TestSlowCompositeReaderWrapper extends LuceneTestCase { + + public void testCoreListenerOnSlowCompositeReaderWrapper() throws IOException { + RandomIndexWriter w = new RandomIndexWriter(random(), newDirectory()); + final int numDocs = TestUtil.nextInt(random(), 1, 5); + for (int i = 0; i < numDocs; ++i) { + w.addDocument(new Document()); + if (random().nextBoolean()) { + w.commit(); + } + } + w.commit(); + w.close(); + + final IndexReader reader = DirectoryReader.open(w.w.getDirectory()); + final LeafReader leafReader = SlowCompositeReaderWrapper.wrap(reader); + + final int numListeners = TestUtil.nextInt(random(), 1, 10); + final List listeners = new ArrayList<>(); + AtomicInteger counter = new AtomicInteger(numListeners); + + for (int i = 0; i < numListeners; ++i) { + CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheKey()); + listeners.add(listener); + leafReader.addCoreClosedListener(listener); + } + for (int i = 0; i < 100; ++i) { + leafReader.addCoreClosedListener(listeners.get(random().nextInt(listeners.size()))); + } + final int removed = random().nextInt(numListeners); + Collections.shuffle(listeners, random()); + for (int i = 0; i < removed; ++i) { + 
leafReader.removeCoreClosedListener(listeners.get(i)); + } + assertEquals(numListeners, counter.get()); + // make sure listeners are registered on the wrapped reader and that closing any of them has the same effect + if (random().nextBoolean()) { + reader.close(); + } else { + leafReader.close(); + } + assertEquals(removed, counter.get()); + w.w.getDirectory().close(); + } + + private static final class CountCoreListener implements LeafReader.CoreClosedListener { + + private final AtomicInteger count; + private final Object coreCacheKey; + + public CountCoreListener(AtomicInteger count, Object coreCacheKey) { + this.count = count; + this.coreCacheKey = coreCacheKey; + } + + @Override + public void onClose(Object coreCacheKey) { + assertSame(this.coreCacheKey, coreCacheKey); + count.decrementAndGet(); + } + + } +} diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java index 4861cd35ca2f..0c5d765b8cd3 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java @@ -443,9 +443,9 @@ public void testBackToTheFuture() throws Exception { iw.deleteDocuments(new Term("foo", "baz")); DirectoryReader r2 = DirectoryReader.open(iw); - FieldCache.DEFAULT.getDocTermOrds(getOnlySegmentReader(r2), "foo", null); + FieldCache.DEFAULT.getDocTermOrds(getOnlyLeafReader(r2), "foo", null); - SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(getOnlySegmentReader(r1), "foo", null); + SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(getOnlyLeafReader(r1), "foo", null); assertEquals(3, v.getValueCount()); v.setDocument(1); assertEquals(1, v.nextOrd()); @@ -473,7 +473,7 @@ public void testNumericEncoded32() throws IOException { iw.close(); DirectoryReader ir = DirectoryReader.open(dir); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); 
SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", FieldCache.INT32_TERM_PREFIX); assertEquals(2, v.getValueCount()); @@ -514,7 +514,7 @@ public void testNumericEncoded64() throws IOException { iw.close(); DirectoryReader ir = DirectoryReader.open(dir); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", FieldCache.INT64_TERM_PREFIX); assertEquals(2, v.getValueCount()); @@ -563,7 +563,7 @@ public void testSortedTermsEnum() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - LeafReader ar = getOnlySegmentReader(ireader); + LeafReader ar = getOnlyLeafReader(ireader); SortedSetDocValues dv = FieldCache.DEFAULT.getDocTermOrds(ar, "field", null); assertEquals(3, dv.getValueCount()); @@ -648,7 +648,7 @@ public void testActuallySingleValued() throws IOException { iw.close(); DirectoryReader ir = DirectoryReader.open(dir); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); SortedSetDocValues v = FieldCache.DEFAULT.getDocTermOrds(ar, "foo", null); assertNotNull(DocValues.unwrapSingleton(v)); // actually a single-valued field diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java index 1b322d936008..93c302c51e77 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java @@ -419,7 +419,7 @@ public void testDocValuesIntegration() throws Exception { iw.addDocument(doc); DirectoryReader ir = iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); // Binary type: can be retrieved via getTerms() expectThrows(IllegalStateException.class, () -> { @@ -535,7 +535,7 @@ public void testNonexistantFields() throws Exception { DirectoryReader ir = 
iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); @@ -593,7 +593,7 @@ public void testNonIndexedFields() throws Exception { DirectoryReader ir = iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); @@ -673,7 +673,7 @@ public void testLongFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LONG_POINT_PARSER, false); + final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LONG_POINT_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], longs.get(i)); } @@ -719,7 +719,7 @@ public void testIntFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.INT_POINT_PARSER, false); + final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.INT_POINT_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], ints.get(i)); } diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java index 0d5584e4544e..de8eab1db77f 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java @@ -47,7 +47,7 @@ public void testFieldCacheReuseAfterReopen() throws Exception { // Open reader1 DirectoryReader r = DirectoryReader.open(dir); - LeafReader r1 = 
getOnlySegmentReader(r); + LeafReader r1 = getOnlyLeafReader(r); final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER, false); assertEquals(17, ints.get(0)); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java index 23b7d0c7a912..9809324e0593 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java @@ -399,7 +399,7 @@ private void doTestSortedSetVsUninvertedField(int minLength, int maxLength) thro // now compare again after the merge ir = writer.getReader(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); SortedSetDocValues expected = FieldCache.DEFAULT.getDocTermOrds(ar, "indexed", null); SortedSetDocValues actual = ar.getSortedSetDocValues("dv"); assertEquals(ir.maxDoc(), expected, actual); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java index e716419de7ca..3f1f450d01f5 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java @@ -181,7 +181,7 @@ public void test2() throws Exception { final DirectoryReader r = writer.getReader(); writer.close(); - final LeafReader sr = getOnlySegmentReader(r); + final LeafReader sr = getOnlyLeafReader(r); final long END_TIME = System.currentTimeMillis() + (TEST_NIGHTLY ? 
30 : 1); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java index c4ef1c4f4b76..4fd66ad05ea2 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java @@ -307,7 +307,7 @@ public void testDocValuesIntegration() throws Exception { iw.addDocument(doc); DirectoryReader ir = iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); // Binary type: can be retrieved via getTerms() expectThrows(IllegalStateException.class, () -> { @@ -340,7 +340,7 @@ public void testNonexistantFields() throws Exception { DirectoryReader ir = iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); @@ -379,7 +379,7 @@ public void testNonIndexedFields() throws Exception { DirectoryReader ir = iw.getReader(); iw.close(); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); @@ -440,7 +440,7 @@ public void testLongFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false); + final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], longs.get(i)); } @@ -486,7 +486,7 @@ public void testIntFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), 
"f", FieldCache.LEGACY_INT_PARSER, false); + final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false); for (int i = 0; i < values.length; ++i) { assertEquals(values[i], ints.get(i)); } diff --git a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java index a7bfffdfc61c..e991b0cb61dd 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/CommonTermsQueryTest.java @@ -32,7 +32,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.index.Terms; @@ -399,8 +398,9 @@ public void testRandomIndex() throws IOException { analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH)); RandomIndexWriter w = new RandomIndexWriter(random(), dir, analyzer); createRandomIndex(atLeast(50), w, random().nextLong()); + w.forceMerge(1); DirectoryReader reader = w.getReader(); - LeafReader wrapper = SlowCompositeReaderWrapper.wrap(reader); + LeafReader wrapper = getOnlyLeafReader(reader); String field = "body"; Terms terms = wrapper.terms(field); PriorityQueue lowFreqQueue = new PriorityQueue( @@ -489,7 +489,7 @@ protected boolean lessThan(TermAndFreq a, TermAndFreq b) { QueryUtils.check(random(), cq, newSearcher(reader2)); reader2.close(); } finally { - IOUtils.close(reader, wrapper, w, dir, analyzer); + IOUtils.close(wrapper, w, dir, analyzer); } } diff --git a/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java b/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java index c14d5438d275..a87e45d23f08 100644 --- 
a/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/TermsQueryTest.java @@ -342,11 +342,11 @@ public void testIsConsideredCostlyByQueryCache() throws IOException { w.close(); TermsQuery query = new TermsQuery(new Term("foo", "bar"), new Term("foo", "baz")); UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy(); - assertFalse(policy.shouldCache(query, getOnlySegmentReader(reader).getContext())); + assertFalse(policy.shouldCache(query, getOnlyLeafReader(reader).getContext())); policy.onUse(query); policy.onUse(query); // cached after two uses - assertTrue(policy.shouldCache(query, getOnlySegmentReader(reader).getContext())); + assertTrue(policy.shouldCache(query, getOnlyLeafReader(reader).getContext())); reader.close(); dir.close(); } diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java index d028dce6db43..b72bfebfa44a 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java @@ -50,7 +50,7 @@ public void testSimple() throws Exception { DirectoryReader ir = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(ir); - LeafReader ar = getOnlySegmentReader(ir); + LeafReader ar = getOnlyLeafReader(ir); ValueSource vs = new SortedSetFieldSource("value"); FunctionValues values = vs.getValues(Collections.emptyMap(), ar.getContext()); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java index 484c1bd4f649..cfd5156aa6ff 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java +++ 
b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java @@ -126,10 +126,11 @@ public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) th doc.add(new TextField(NO_PAYLOAD_FIELD, English.intToEnglish(i), Field.Store.YES)); writer.addDocument(doc); } + writer.forceMerge(1); reader = DirectoryReader.open(writer); writer.close(); - IndexSearcher searcher = LuceneTestCase.newSearcher(reader); + IndexSearcher searcher = LuceneTestCase.newSearcher(LuceneTestCase.getOnlyLeafReader(reader)); searcher.setSimilarity(similarity); return searcher; } diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java index 3f168bba4aff..179b971fbcaf 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadSpans.java @@ -42,7 +42,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.search.spans.MultiSpansWrapper; import org.apache.lucene.search.spans.SpanCollector; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; @@ -75,12 +74,12 @@ public void testSpanTermQuery() throws Exception { Spans spans; stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "seventy")); - spans = MultiSpansWrapper.wrap(indexReader, stq, SpanWeight.Postings.PAYLOADS); + spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 100, 1, 1, 1); stq = new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "seventy")); - spans = MultiSpansWrapper.wrap(indexReader, stq, SpanWeight.Postings.PAYLOADS); + 
spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 100, 0, 0, 0); } @@ -91,7 +90,7 @@ public void testSpanFirst() throws IOException { SpanFirstQuery sfq; match = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one")); sfq = new SpanFirstQuery(match, 2); - Spans spans = MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS); + Spans spans = sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); checkSpans(spans, 109, 1, 1, 1); //Test more complicated subclause SpanQuery[] clauses = new SpanQuery[2]; @@ -99,11 +98,11 @@ public void testSpanFirst() throws IOException { clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "hundred")); match = new SpanNearQuery(clauses, 0, true); sfq = new SpanFirstQuery(match, 2); - checkSpans(MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1); + checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1); match = new SpanNearQuery(clauses, 0, false); sfq = new SpanFirstQuery(match, 2); - checkSpans(MultiSpansWrapper.wrap(indexReader, sfq, SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1); + checkSpans(sfq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS), 100, 2, 1, 1); } @@ -123,10 +122,10 @@ public void testSpanNot() throws Exception { Document doc = new Document(); doc.add(newTextField(PayloadHelper.FIELD, "one two three one four three", Field.Store.YES)); writer.addDocument(doc); - IndexReader reader = writer.getReader(); + IndexReader reader = getOnlyLeafReader(writer.getReader()); writer.close(); - checkSpans(MultiSpansWrapper.wrap(reader, snq, SpanWeight.Postings.PAYLOADS), 1, new int[]{2}); + 
checkSpans(snq.createWeight(newSearcher(reader), false).getSpans(reader.leaves().get(0), SpanWeight.Postings.PAYLOADS), 1, new int[]{2}); reader.close(); directory.close(); } @@ -137,7 +136,7 @@ public void testNestedSpans() throws Exception { IndexSearcher searcher = getSearcher(); stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "mark")); - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), stq, SpanWeight.Postings.PAYLOADS); + spans = stq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertNull(spans); SpanQuery[] clauses = new SpanQuery[3]; @@ -146,7 +145,7 @@ public void testNestedSpans() throws Exception { clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx")); SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 12, false); - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), spanNearQuery, SpanWeight.Postings.PAYLOADS); + spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 2, new int[]{3,3}); @@ -157,7 +156,7 @@ public void testNestedSpans() throws Exception { spanNearQuery = new SpanNearQuery(clauses, 6, true); - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), spanNearQuery, SpanWeight.Postings.PAYLOADS); + spans = spanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 1, new int[]{3}); @@ -179,7 +178,7 @@ public void testNestedSpans() throws Exception { SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses2, 6, false); // yy within 6 of xx within 6 of rr - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS); + spans = nestedSpanNearQuery.createWeight(searcher, 
false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 2, new int[]{3,3}); closeIndexReader.close(); @@ -210,7 +209,7 @@ public void testFirstClauseWithoutPayload() throws Exception { clauses3[1] = snq; SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false); - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS); + spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 1, new int[]{3}); @@ -248,7 +247,7 @@ public void testHeavilyNestedSpanQuery() throws Exception { SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false); - spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), nestedSpanNearQuery, SpanWeight.Postings.PAYLOADS); + spans = nestedSpanNearQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 2, new int[]{8, 8}); closeIndexReader.close(); @@ -265,7 +264,7 @@ public void testShrinkToAfterShortestMatch() throws IOException { writer.addDocument(doc); IndexReader reader = writer.getReader(); - IndexSearcher is = newSearcher(reader); + IndexSearcher is = newSearcher(getOnlyLeafReader(reader)); writer.close(); SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a")); @@ -273,7 +272,7 @@ public void testShrinkToAfterShortestMatch() throws IOException { SpanQuery[] sqs = { stq1, stq2 }; SpanNearQuery snq = new SpanNearQuery(sqs, 1, true); VerifyingCollector collector = new VerifyingCollector(); - Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS); + Spans spans = snq.createWeight(is, 
false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); TopDocs topDocs = is.search(snq, 1); Set payloadSet = new HashSet<>(); @@ -304,7 +303,7 @@ public void testShrinkToAfterShortestMatch2() throws IOException { doc.add(new TextField("content", new StringReader("a b a d k f a h i k a k"))); writer.addDocument(doc); IndexReader reader = writer.getReader(); - IndexSearcher is = newSearcher(reader); + IndexSearcher is = newSearcher(getOnlyLeafReader(reader)); writer.close(); SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a")); @@ -312,7 +311,7 @@ public void testShrinkToAfterShortestMatch2() throws IOException { SpanQuery[] sqs = { stq1, stq2 }; SpanNearQuery snq = new SpanNearQuery(sqs, 0, true); VerifyingCollector collector = new VerifyingCollector(); - Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS); + Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); TopDocs topDocs = is.search(snq, 1); Set payloadSet = new HashSet<>(); @@ -343,14 +342,14 @@ public void testShrinkToAfterShortestMatch3() throws IOException { doc.add(new TextField("content", new StringReader("j k a l f k k p a t a k l k t a"))); writer.addDocument(doc); IndexReader reader = writer.getReader(); - IndexSearcher is = newSearcher(reader); + IndexSearcher is = newSearcher(getOnlyLeafReader(reader)); writer.close(); SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a")); SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k")); SpanQuery[] sqs = { stq1, stq2 }; SpanNearQuery snq = new SpanNearQuery(sqs, 0, true); - Spans spans = MultiSpansWrapper.wrap(is.getIndexReader(), snq, SpanWeight.Postings.PAYLOADS); + Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS); TopDocs topDocs = is.search(snq, 1); Set payloadSet = new HashSet<>(); diff --git 
a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java index f453b0d0af72..dfa0191efa7b 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/TestPayloadTermQuery.java @@ -38,10 +38,10 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.search.spans.MultiSpansWrapper; import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.Spans; import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.search.spans.SpanWeight; +import org.apache.lucene.search.spans.Spans; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.English; @@ -131,10 +131,11 @@ public static void beforeClass() throws Exception { doc.add(newTextField("multiField", English.intToEnglish(i) + " " + English.intToEnglish(i), Field.Store.YES)); writer.addDocument(doc); } + writer.forceMerge(1); reader = writer.getReader(); writer.close(); - searcher = newSearcher(reader); + searcher = newSearcher(getOnlyLeafReader(reader)); searcher.setSimilarity(similarity); } @@ -163,7 +164,7 @@ public void test() throws IOException { assertTrue(doc.score + " does not equal: " + 1, doc.score == 1); } CheckHits.checkExplanations(query, PayloadHelper.FIELD, searcher, true); - Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query); + Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertTrue("spans is null and it shouldn't be", spans != null); /*float score = hits.score(0); for (int i =1; i < hits.length(); i++) @@ -214,7 +215,7 @@ public void 
testMultipleMatchesPerDoc() throws Exception { } assertTrue(numTens + " does not equal: " + 10, numTens == 10); CheckHits.checkExplanations(query, "field", searcher, true); - Spans spans = MultiSpansWrapper.wrap(searcher.getIndexReader(), query); + Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS); assertTrue("spans is null and it shouldn't be", spans != null); //should be two matches per document int count = 0; diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java index 5a8a99f4d154..85ac12f7ceab 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java @@ -568,7 +568,7 @@ public void testBytesMergeAwayAllValues() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - BinaryDocValues dv = getOnlySegmentReader(ireader).getBinaryDocValues("field"); + BinaryDocValues dv = getOnlyLeafReader(ireader).getBinaryDocValues("field"); assertEquals(new BytesRef(), dv.get(0)); ireader.close(); @@ -743,7 +743,7 @@ public void testSortedMergeAwayAllValues() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field"); + SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field"); if (codecSupportsDocsWithField()) { assertEquals(-1, dv.getOrd(0)); assertEquals(0, dv.getValueCount()); @@ -833,7 +833,7 @@ public void testSortedTermsEnum() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field"); + SortedDocValues dv = getOnlyLeafReader(ireader).getSortedDocValues("field"); 
assertEquals(3, dv.getValueCount()); TermsEnum termsEnum = dv.termsEnum(); @@ -1077,7 +1077,7 @@ public void testDocValuesSimple() throws IOException { TopDocs search = searcher.search(query.build(), 10); assertEquals(5, search.totalHits); ScoreDoc[] scoreDocs = search.scoreDocs; - NumericDocValues docValues = getOnlySegmentReader(reader).getNumericDocValues("docId"); + NumericDocValues docValues = getOnlyLeafReader(reader).getNumericDocValues("docId"); for (int i = 0; i < scoreDocs.length; i++) { assertEquals(i, scoreDocs[i].doc); assertEquals(i, docValues.get(scoreDocs[i].doc)); @@ -1154,12 +1154,11 @@ public void testRandomSortedBytes() throws IOException { int ord = docValues.lookupTerm(expected); assertEquals(i, ord); } - LeafReader slowR = SlowCompositeReaderWrapper.wrap(reader); Set> entrySet = docToString.entrySet(); for (Entry entry : entrySet) { // pk lookup - PostingsEnum termPostingsEnum = slowR.postings(new Term("id", entry.getKey())); + PostingsEnum termPostingsEnum = TestUtil.docs(random(), reader, "id", new BytesRef(entry.getKey()), null, 0); int docId = termPostingsEnum.nextDoc(); expected = new BytesRef(entry.getValue()); final BytesRef actual = docValues.get(docId); @@ -1516,7 +1515,7 @@ public void testSortedSetOneValue() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); dv.setDocument(0); assertEquals(0, dv.nextOrd()); @@ -1542,7 +1541,7 @@ public void testSortedSetTwoFields() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); dv.setDocument(0); assertEquals(0, dv.nextOrd()); @@ -1551,7 +1550,7 @@ public void testSortedSetTwoFields() 
throws IOException { BytesRef bytes = dv.lookupOrd(0); assertEquals(new BytesRef("hello"), bytes); - dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field2"); + dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field2"); dv.setDocument(0); assertEquals(0, dv.nextOrd()); @@ -1585,7 +1584,7 @@ public void testSortedSetTwoDocumentsMerged() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(2, dv.getValueCount()); dv.setDocument(0); @@ -1619,7 +1618,7 @@ public void testSortedSetTwoValues() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); dv.setDocument(0); assertEquals(0, dv.nextOrd()); @@ -1649,7 +1648,7 @@ public void testSortedSetTwoValuesUnordered() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); dv.setDocument(0); assertEquals(0, dv.nextOrd()); @@ -1689,7 +1688,7 @@ public void testSortedSetThreeValuesTwoDocs() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(3, dv.getValueCount()); dv.setDocument(0); @@ -1733,7 +1732,7 @@ public void testSortedSetTwoDocumentsLastMissing() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = 
getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(1, dv.getValueCount()); dv.setDocument(0); @@ -1767,7 +1766,7 @@ public void testSortedSetTwoDocumentsLastMissingMerge() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(1, dv.getValueCount()); dv.setDocument(0); @@ -1800,7 +1799,7 @@ public void testSortedSetTwoDocumentsFirstMissing() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(1, dv.getValueCount()); dv.setDocument(1); @@ -1834,7 +1833,7 @@ public void testSortedSetTwoDocumentsFirstMissingMerge() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(1, dv.getValueCount()); dv.setDocument(1); @@ -1870,7 +1869,7 @@ public void testSortedSetMergeAwayAllValues() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(0, dv.getValueCount()); ireader.close(); @@ -1894,7 +1893,7 @@ public void testSortedSetTermsEnum() throws IOException { DirectoryReader ireader = iwriter.getReader(); iwriter.close(); - SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field"); + 
SortedSetDocValues dv = getOnlyLeafReader(ireader).getSortedSetDocValues("field"); assertEquals(3, dv.getValueCount()); TermsEnum termsEnum = dv.termsEnum(); @@ -2784,13 +2783,12 @@ public void testEmptyBinaryValueOnPageSizes() throws Exception { IndexReader r = w.getReader(); w.close(); - LeafReader ar = SlowCompositeReaderWrapper.wrap(r); - BinaryDocValues values = ar.getBinaryDocValues("field"); + BinaryDocValues values = MultiDocValues.getBinaryValues(r, "field"); for(int j=0;j<5;j++) { BytesRef result = values.get(0); assertTrue(result.length == 0 || result.length == 1< - * NOTE: This should be used for testing purposes only - * @lucene.internal - */ -public class MultiSpansWrapper { - - public static Spans wrap(IndexReader reader, SpanQuery spanQuery) throws IOException { - return wrap(reader, spanQuery, SpanWeight.Postings.POSITIONS); - } - - public static Spans wrap(IndexReader reader, SpanQuery spanQuery, SpanWeight.Postings requiredPostings) throws IOException { - - LeafReader lr = SlowCompositeReaderWrapper.wrap(reader); // slow, but ok for testing - LeafReaderContext lrContext = lr.getContext(); - IndexSearcher searcher = new IndexSearcher(lr); - searcher.setQueryCache(null); - - SpanWeight w = spanQuery.createWeight(searcher, false); - - return w.getSpans(lrContext, requiredPostings); - } -} diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java index 79eb62803eb3..e5aa7a2e4efb 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java @@ -764,15 +764,29 @@ public String getTestName() { * Some tests expect the directory to contain a single segment, and want to * do tests on that segment's reader. This is an utility method to help them. 
*/ + /* public static SegmentReader getOnlySegmentReader(DirectoryReader reader) { List subReaders = reader.leaves(); if (subReaders.size() != 1) { throw new IllegalArgumentException(reader + " has " + subReaders.size() + " segments instead of exactly one"); } final LeafReader r = subReaders.get(0).reader(); - assertTrue(r instanceof SegmentReader); + assertTrue("expected a SegmentReader but got " + r, r instanceof SegmentReader); return (SegmentReader) r; } + */ + + /** + * Some tests expect the directory to contain a single segment, and want to + * do tests on that segment's reader. This is an utility method to help them. + */ + public static LeafReader getOnlyLeafReader(IndexReader reader) { + List subReaders = reader.leaves(); + if (subReaders.size() != 1) { + throw new IllegalArgumentException(reader + " has " + subReaders.size() + " segments instead of exactly one"); + } + return subReaders.get(0).reader(); + } /** * Returns true if and only if the calling thread is the primary thread @@ -1625,25 +1639,11 @@ static Directory newDirectoryImpl(Random random, String clazzName, LockFactory l } public static IndexReader wrapReader(IndexReader r) throws IOException { - return wrapReader(r, true); - } - - public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeReader) throws IOException { Random random = random(); - // TODO: remove this, and fix those tests to wrap before putting slow around: - final boolean wasOriginallyAtomic = r instanceof LeafReader; for (int i = 0, c = random.nextInt(6)+1; i < c; i++) { - switch(random.nextInt(6)) { + switch(random.nextInt(5)) { case 0: - if (allowSlowCompositeReader) { - if (VERBOSE) { - System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with SlowCompositeReaderWrapper.wrap"); - } - r = SlowCompositeReaderWrapper.wrap(r); - } - break; - case 1: // will create no FC insanity in atomic case, as ParallelLeafReader has own cache key: if (VERBOSE) { 
System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeaf/CompositeReader"); @@ -1652,7 +1652,7 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe new ParallelLeafReader((LeafReader) r) : new ParallelCompositeReader((CompositeReader) r); break; - case 2: + case 1: // Häckidy-Hick-Hack: a standard MultiReader will cause FC insanity, so we use // QueryUtils' reader with a fake cache key, so insanity checker cannot walk // along our reader: @@ -1661,9 +1661,9 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe } r = new FCInvisibleMultiReader(r); break; - case 3: - if (allowSlowCompositeReader) { - final LeafReader ar = SlowCompositeReaderWrapper.wrap(r); + case 2: + if (r instanceof LeafReader) { + final LeafReader ar = (LeafReader) r; final List allFields = new ArrayList<>(); for (FieldInfo fi : ar.getFieldInfos()) { allFields.add(fi.name); @@ -1673,7 +1673,7 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe final Set fields = new HashSet<>(allFields.subList(0, end)); // will create no FC insanity as ParallelLeafReader has own cache key: if (VERBOSE) { - System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader(SlowCompositeReaderWapper)"); + System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with ParallelLeafReader"); } r = new ParallelLeafReader( new FieldFilterLeafReader(ar, fields, false), @@ -1681,7 +1681,7 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe ); } break; - case 4: + case 3: // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use // QueryUtils' reader with a fake cache key, so insanity checker cannot walk // along our reader: @@ -1694,7 +1694,7 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe r = new 
AssertingDirectoryReader((DirectoryReader)r); } break; - case 5: + case 4: if (VERBOSE) { System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with MismatchedLeaf/DirectoryReader"); } @@ -1708,11 +1708,8 @@ public static IndexReader wrapReader(IndexReader r, boolean allowSlowCompositeRe fail("should not get here"); } } - if (wasOriginallyAtomic) { - if (allowSlowCompositeReader) { - r = SlowCompositeReaderWrapper.wrap(r); - } - } else if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) { + + if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) { // prevent cache insanity caused by e.g. ParallelCompositeReader, to fix we wrap one more time: r = new FCInvisibleMultiReader(r); } diff --git a/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java b/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java index 22cd46701a54..52953490190d 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java +++ b/lucene/test-framework/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java @@ -315,7 +315,7 @@ public int getOffsetGap(String fieldName) { doc.add(new Field("f", "a", ft)); doc.add(new Field("f", "a", ft)); writer.addDocument(doc); - final LeafReader reader = getOnlySegmentReader(writer.getReader()); + final LeafReader reader = getOnlyLeafReader(writer.getReader()); final Fields fields = reader.getTermVectors(0); final Terms terms = fields.terms("f"); final TermsEnum te = terms.iterator(); diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java index d84bed99ee33..c6396ae551d5 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java +++ 
b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java @@ -21,17 +21,18 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.BaseStoredFieldsFormatTestCase; import org.apache.lucene.index.CodecReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.store.ByteArrayDataInput; @@ -306,7 +307,7 @@ public void testChunkCleanup() throws IOException { assertNotNull(ir2); ir.close(); ir = ir2; - CodecReader sr = getOnlySegmentReader(ir); + CodecReader sr = (CodecReader) getOnlyLeafReader(ir); CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader(); // we could get lucky, and have zero, but typically one. 
assertTrue(reader.getNumDirtyChunks() <= 1); diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java index 4fa02786c703..f4858d16cd7e 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java +++ b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java @@ -25,18 +25,18 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StoredField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BaseTermVectorsFormatTestCase; import org.apache.lucene.index.CodecReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.BaseTermVectorsFormatTestCase; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -56,7 +56,7 @@ public void testNoOrds() throws Exception { ft.setStoreTermVectors(true); doc.add(new Field("foo", "this is a test", ft)); iw.addDocument(doc); - LeafReader ir = getOnlySegmentReader(iw.getReader()); + LeafReader ir = getOnlyLeafReader(iw.getReader()); Terms terms = ir.getTermVector(0, "foo"); assertNotNull(terms); TermsEnum termsEnum = terms.iterator(); @@ -118,7 +118,7 @@ public void testChunkCleanup() throws IOException { assertNotNull(ir2); ir.close(); ir = ir2; - CodecReader sr = getOnlySegmentReader(ir); + 
CodecReader sr = (CodecReader) getOnlyLeafReader(ir); CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader(); // we could get lucky, and have zero, but typically one. assertTrue(reader.getNumDirtyChunks() <= 1); diff --git a/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java b/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java index b572289e9144..5276301ba481 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java +++ b/lucene/test-framework/src/test/org/apache/lucene/index/TestAssertingLeafReader.java @@ -54,9 +54,8 @@ public void testAssertBits() throws Exception { assertEquals(1, r.numDocs()); r = new AssertingDirectoryReader((DirectoryReader) r); + final IndexReader r2 = r; - final IndexReader r2 = SlowCompositeReaderWrapper.wrap(r); - Thread thread = new Thread() { @Override public void run() { @@ -68,6 +67,6 @@ public void run() { thread.start(); thread.join(); - IOUtils.close(r2, dir); + IOUtils.close(r, dir); } } From 77d233bc01894c33f02178898dd3368b51902a11 Mon Sep 17 00:00:00 2001 From: Upayavira Date: Thu, 10 Mar 2016 23:48:48 +0000 Subject: [PATCH 0099/1113] SOLR-7858 - update links between new/old UIs for 6.x release --- solr/webapp/web/css/angular/common.css | 13 ++----------- solr/webapp/web/css/styles/common.css | 16 +++++++++++++++- solr/webapp/web/index.html | 6 ++---- solr/webapp/web/old.html | 5 +++-- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/solr/webapp/web/css/angular/common.css b/solr/webapp/web/css/angular/common.css index 1a3b087db5b0..d9604464d6cc 100644 --- a/solr/webapp/web/css/angular/common.css +++ b/solr/webapp/web/css/angular/common.css @@ -762,16 +762,7 @@ pre.syntax .tex .formula padding-left: 16px; } -.new-ui-warning { - position: absolute; - left: 150px; - top: -20px; - align: center; - color: red; - font-weight: bold; -} -.new-ui-warning a.ul { - color: red; - 
font-weight: bold; +.other-ui-link a.ul { text-decoration: underline; } + diff --git a/solr/webapp/web/css/styles/common.css b/solr/webapp/web/css/styles/common.css index f0e0652fa17e..6c0a9fbcba34 100644 --- a/solr/webapp/web/css/styles/common.css +++ b/solr/webapp/web/css/styles/common.css @@ -714,4 +714,18 @@ pre.syntax .tex .formula } .other-ui-link a.ul { text-decoration: underline; -} \ No newline at end of file +} + +.old-ui-warning { + position: absolute; + right: 0px; + top: -20px; + align: center; + color: red; + font-weight: bold; +} +.old-ui-warning a.ul { + color: red; + font-weight: bold; + text-decoration: underline; +} diff --git a/solr/webapp/web/index.html b/solr/webapp/web/index.html index 3c230478ada3..6c4df6afe86c 100644 --- a/solr/webapp/web/index.html +++ b/solr/webapp/web/index.html @@ -121,10 +121,8 @@

    Connection recovered...

    {{exception.msg}}
    -
    - This is an experimental UI. Report bugs here. - For the old UI click here -   +
    diff --git a/solr/webapp/web/old.html b/solr/webapp/web/old.html index a25504e21ba3..d688a0a56e3c 100644 --- a/solr/webapp/web/old.html +++ b/solr/webapp/web/old.html @@ -79,8 +79,9 @@

    SolrCore Initialization Failures

    -