diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index abde05ec7919e..612838592712b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 53243c2c081eb..58dcf875ce297 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.2 + - label: "{{matrix.image}} / 8.13.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.3 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index c5b9bb830a8d6..3462e0fb95aba 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -312,8 +312,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.13.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.2#bwcTest + - label: 8.13.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -321,7 +321,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 2d8ace4845f4f..d3e57196e1c89 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,5 +30,5 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 89449ff7f9f2f..db131b89ffa4e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - "7.17.20" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index ccbe9cd2f4a2b..6cb22dad9bc79 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -130,7 +130,8 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { ':server:generateModulesList', 
  ':server:generatePluginsList',
  ':generateProviderImpls',
-  ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs'].collect { elasticsearchProject.right()?.task(it) ?: it })
+  ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs',
+  ':x-pack:libs:es-opensaml-security-api:shadowJar'].collect { elasticsearchProject.right()?.task(it) ?: it })
 }
 
 // this path is produced by the extractLibs task above
@@ -239,20 +240,22 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
  * but before the XML document, e.g. a doctype or comment
  */
 void modifyXml(Object path, Action<? super Node> action, String preface = null) {
-  Node xml = parseXml(path)
-  action.execute(xml)
+  if (project.file(path).exists()) {
+    Node xml = parseXml(path)
+    action.execute(xml)
 
-  File xmlFile = project.file(path)
-  xmlFile.withPrintWriter { writer ->
-    def printer = new XmlNodePrinter(writer)
-    printer.namespaceAware = true
-    printer.preserveWhitespace = true
-    writer.write("<?xml version=\"1.0\"?>\n")
+    File xmlFile = project.file(path)
+    xmlFile.withPrintWriter { writer ->
+      def printer = new XmlNodePrinter(writer)
+      printer.namespaceAware = true
+      printer.preserveWhitespace = true
+      writer.write("<?xml version=\"1.0\"?>\n")
 
-    if (preface != null) {
-      writer.write(preface)
+      if (preface != null) {
+        writer.write(preface)
+      }
+      printer.print(xml)
     }
-    printer.print(xml)
   }
 }
 
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
index 6524247c4c8f6..16c286bfdd3f2 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java
@@ -49,6 +49,7 @@ public class MrjarPlugin implements Plugin<Project> {
 
     private static final Pattern MRJAR_SOURCESET_PATTERN = Pattern.compile("main(\\d{2})");
+    private static final String MRJAR_IDEA_ENABLED = "org.gradle.mrjar.idea.enabled";
 
     private final JavaToolchainService javaToolchains;
 
@@ -61,23 +62,30 @@ public class MrjarPlugin implements Plugin<Project> {
     public void apply(Project project) {
         project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class);
         var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class);
-
-        List<Integer> mainVersions = findSourceVersions(project);
-        List<String> mainSourceSets = new ArrayList<>();
-        mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME);
-        List<String> testSourceSets = new ArrayList<>(mainSourceSets);
-        testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME);
-        for (int javaVersion : mainVersions) {
-            String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion;
-            SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion);
-            configureSourceSetInJar(project, mainSourceSet, javaVersion);
-            mainSourceSets.add(mainSourceSetName);
-            testSourceSets.add(mainSourceSetName);
-
-            String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion;
-            SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion);
-            testSourceSets.add(testSourceSetName);
-            createTestTask(project, testSourceSet, javaVersion, mainSourceSets);
+        var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true");
+        var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true");
+
+        // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this.
+        // Avoids an IntelliJ bug:
+        // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea
+        if (isIdeaSync == false || ideaSourceSetsEnabled) {
+            List<Integer> mainVersions = findSourceVersions(project);
+            List<String> mainSourceSets = new ArrayList<>();
+            mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME);
+            List<String> testSourceSets = new ArrayList<>(mainSourceSets);
+            testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME);
+            for (int javaVersion : mainVersions) {
+                String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion;
+                SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion);
+                configureSourceSetInJar(project, mainSourceSet, javaVersion);
+                mainSourceSets.add(mainSourceSetName);
+                testSourceSets.add(mainSourceSetName);
+
+                String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion;
+                SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion);
+                testSourceSets.add(testSourceSetName);
+                createTestTask(project, testSourceSet, javaVersion, mainSourceSets);
+            }
         }
 
         configureMrjar(project);
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java
index 89a40711c9a19..0270ee22ca8c5 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java
@@ -11,6 +11,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import org.apache.commons.compress.utils.Lists;
 import org.gradle.jvm.toolchain.JavaLanguageVersion;
 import org.gradle.jvm.toolchain.JavaToolchainDownload;
 import org.gradle.jvm.toolchain.JavaToolchainRequest;
@@ -20,17 +21,17 @@
 import java.io.IOException;
 import java.net.URI;
 import java.net.URL;
+import java.util.Comparator;
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.stream.StreamSupport;
 
 import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri;
 
 public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver {
 
     // package protected for better testing
-    final Map<AdoptiumVersionRequest, Optional<String>> CACHED_RELEASES = new ConcurrentHashMap<>();
+    final Map<AdoptiumVersionRequest, Optional<AdoptiumVersionInfo>> CACHED_SEMVERS = new ConcurrentHashMap<>();
 
     @Override
     public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
@@ -38,7 +39,7 @@ public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
             return Optional.empty();
         }
         AdoptiumVersionRequest versionRequestKey = toVersionRequest(request);
-        Optional<String> versionInfo = CACHED_RELEASES.computeIfAbsent(
+        Optional<AdoptiumVersionInfo> versionInfo = CACHED_SEMVERS.computeIfAbsent(
            versionRequestKey,
            (r) -> resolveAvailableVersion(versionRequestKey)
        );
@@ -53,12 +54,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) {
         return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion);
     }
 
-    private Optional<String> resolveAvailableVersion(AdoptiumVersionRequest requestKey) {
+    private Optional<AdoptiumVersionInfo> resolveAvailableVersion(AdoptiumVersionRequest requestKey) {
         ObjectMapper mapper = new ObjectMapper();
         try {
             int languageVersion = requestKey.languageVersion.asInt();
             URL source = new URL(
-                "https://api.adoptium.net/v3/info/release_names?architecture="
+                "https://api.adoptium.net/v3/info/release_versions?architecture="
                     + requestKey.arch
                     + "&image_type=jdk&os="
                    + requestKey.platform
@@ -70,8 +71,14 @@ private Optional<String> resolveAvailableVersion(AdoptiumVersionRequest requestK
                     + ")"
             );
             JsonNode jsonNode = mapper.readTree(source);
-            JsonNode versionsNode = jsonNode.get("releases");
-            return StreamSupport.stream(versionsNode.spliterator(), false).map(JsonNode::textValue).findFirst();
+            JsonNode versionsNode = jsonNode.get("versions");
+            return Optional.of(
+                Lists.newArrayList(versionsNode.iterator())
+                    .stream()
+                    .map(this::toVersionInfo)
+                    .max(Comparator.comparing(AdoptiumVersionInfo::semver))
+                    .get()
+            );
         } catch (FileNotFoundException e) {
             // request combo not supported (e.g. aarch64 + windows
             return Optional.empty();
@@ -80,10 +87,21 @@ private Optional<String> resolveAvailableVersion(AdoptiumVersionRequest requestK
         }
     }
 
-    private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) {
+    private AdoptiumVersionInfo toVersionInfo(JsonNode node) {
+        return new AdoptiumVersionInfo(
+            node.get("build").asInt(),
+            node.get("major").asInt(),
+            node.get("minor").asInt(),
+            node.get("openjdk_version").asText(),
+            node.get("security").asInt(),
+            node.get("semver").asText()
+        );
+    }
+
+    private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) {
         return URI.create(
-            "https://api.adoptium.net/v3/binary/version/"
-                + version
+            "https://api.adoptium.net/v3/binary/version/jdk-"
+                + versionInfo.semver()
                 + "/"
                 + request.platform
                 + "/"
@@ -100,5 +118,7 @@ private boolean requestIsSupported(JavaToolchainRequest request) {
         return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM);
     }
 
+    record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {}
+
     record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {}
 }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java
index 162895fd486cf..818cb040c172e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java
@@ -39,7 +39,11 @@ record JdkBuild(JavaLanguageVersion languageVersion, String version, String buil
     );
 
     // package private so it can be replaced by tests
-    List<JdkBuild> builds = List.of(getBundledJdkBuild());
+    List<JdkBuild> builds = List.of(
+        getBundledJdkBuild(),
+        // 22 release candidate
+        new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059")
+    );
 
     private JdkBuild getBundledJdkBuild() {
         String bundledJdkVersion = VersionProperties.getBundledJdkVersion();
diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy
index fe4a644ddfc1d..6383d577f027f 100644
--- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy
+++ 
b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,6 +11,7 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver +import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -37,7 +38,12 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) + resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), + 1, + 1, + "" + languageVersion.asInt() + ".1.1.1+37", + 0, "" + languageVersion.asInt() + ".1.1.1+37.1" + ))) } return resolver diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 8e6c1798b76a3..5885ef49920c7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,7 +2,7 @@ elasticsearch = 8.14.0 lucene = 9.11.0-snapshot-b9844481e3a bundled_jdk_vendor = openjdk -bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059 +bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index f6e3578811688..3004494262e6b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -457,6 +457,26 @@ public void testLegacyV3() throws GeneralSecurityException, IOException { assertThat(toByteArray(wrapper.getFile("file_setting")), equalTo("file_value".getBytes(StandardCharsets.UTF_8))); } + public void testLegacyV5() throws GeneralSecurityException, IOException { + final Path configDir = createTempDir(); + final Path keystore = configDir.resolve("elasticsearch.keystore"); + try ( + InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v5-with-password-elasticsearch.keystore"); + OutputStream os = Files.newOutputStream(keystore) + ) { + final byte[] buffer = new byte[4096]; + int readBytes; + while ((readBytes = is.read(buffer)) > 0) { + os.write(buffer, 0, readBytes); + } + } + final KeyStoreWrapper wrapper = KeyStoreWrapper.load(configDir); + assertNotNull(wrapper); + wrapper.decrypt("keystorepassword".toCharArray()); + assertThat(wrapper.getFormatVersion(), equalTo(5)); + assertThat(wrapper.getSettingNames(), equalTo(Set.of("keystore.seed"))); + } + public void testSerializationNewlyCreated() throws Exception { final KeyStoreWrapper wrapper = KeyStoreWrapper.create(); wrapper.setString("string_setting", "string_value".toCharArray()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index 
ae19fa0b94b83..979b118a887e5 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import java.io.InputStream; @@ -46,8 +47,20 @@ public void testKeystoreUpgradeV4() throws Exception { assertKeystoreUpgrade("/format-v4-elasticsearch.keystore", KeyStoreWrapper.V4_VERSION); } + public void testKeystoreUpgradeV5() throws Exception { + assertKeystoreUpgradeWithPassword("/format-v5-with-password-elasticsearch.keystore", KeyStoreWrapper.LE_VERSION); + } + private void assertKeystoreUpgrade(String file, int version) throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); + assertKeystoreUpgrade(file, version, null); + } + + private void assertKeystoreUpgradeWithPassword(String file, int version) throws Exception { + assertKeystoreUpgrade(file, version, "keystorepassword"); + } + + private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); @@ -56,11 +69,17 @@ private void assertKeystoreUpgrade(String file, int version) throws Exception { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } + if (password != null) { + terminal.addSecretInput(password); + terminal.addSecretInput(password); + } execute(); + terminal.reset(); + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); - afterUpgrade.decrypt(new char[0]); + afterUpgrade.decrypt(password != null ? password.toCharArray() : new char[0]); assertThat(afterUpgrade.getSettingNames(), hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); } } @@ -69,5 +88,4 @@ public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); } - } diff --git a/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore new file mode 100644 index 0000000000000..0547db46eb1ef Binary files /dev/null and b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore differ diff --git a/docs/changelog/103374.yaml b/docs/changelog/103374.yaml new file mode 100644 index 0000000000000..fcdee9185eb92 --- /dev/null +++ b/docs/changelog/103374.yaml @@ -0,0 +1,16 @@ +pr: 103374 +summary: Cut over stored fields to ZSTD for compression +area: Search +type: enhancement +issues: [] +highlight: + title: Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE + body: |- + Stored fields are now compressed by splitting documents into blocks, which + are then compressed independently with ZStandard. 
`index.codec: default` + (default) uses blocks of at most 14kB or 128 documents compressed with level + 0, while `index.codec: best_compression` uses blocks of at most 240kB or + 2048 documents compressed at level 3. On most datasets that we tested + against, this yielded storage improvements in the order of 10%, slightly + faster indexing and similar retrieval latencies. + notable: true diff --git a/docs/changelog/106077.yaml b/docs/changelog/106077.yaml new file mode 100644 index 0000000000000..eb987cd9617f8 --- /dev/null +++ b/docs/changelog/106077.yaml @@ -0,0 +1,7 @@ +pr: 106077 +summary: Fix merging component templates with a mix of dotted and nested object mapper + definitions +area: Mapping +type: bug +issues: + - 105482 diff --git a/docs/changelog/106247.yaml b/docs/changelog/106247.yaml deleted file mode 100644 index 5895dffd685a4..0000000000000 --- a/docs/changelog/106247.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106247 -summary: Fix a downsample persistent task assignment bug -area: Downsampling -type: bug -issues: [] diff --git a/docs/changelog/106673.yaml b/docs/changelog/106673.yaml deleted file mode 100644 index 9a716d20ad2bc..0000000000000 --- a/docs/changelog/106673.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106673 -summary: "ESQL: Fix fully pruned aggregates" -area: ES|QL -type: bug -issues: - - 106427 diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml deleted file mode 100644 index f823caff7aefe..0000000000000 --- a/docs/changelog/106873.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106873 -summary: Query API Key Information API support for the `typed_keys` request parameter -area: Security -type: enhancement -issues: - - 106817 diff --git a/docs/changelog/106899.yaml b/docs/changelog/106899.yaml new file mode 100644 index 0000000000000..a2db24236a47e --- /dev/null +++ b/docs/changelog/106899.yaml @@ -0,0 +1,6 @@ +pr: 106899 +summary: Add ES|QL Locate function +area: ES|QL +type: enhancement +issues: + - 106818 diff --git a/docs/changelog/106975.yaml b/docs/changelog/106975.yaml new file mode 100644 index 0000000000000..bd32b3574c4f9 --- /dev/null +++ b/docs/changelog/106975.yaml @@ -0,0 +1,5 @@ +pr: 106975 +summary: GET /_all should return hidden indices with visible aliases +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/106990.yaml b/docs/changelog/106990.yaml deleted file mode 100644 index 26646e742a5ee..0000000000000 --- a/docs/changelog/106990.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106990 -summary: Address concurrency issue in top hits aggregation -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/107007.yaml b/docs/changelog/107007.yaml new file mode 100644 index 0000000000000..b2a755171725b --- /dev/null +++ b/docs/changelog/107007.yaml @@ -0,0 +1,5 @@ +pr: 107007 +summary: "ESQL: Support ST_DISJOINT" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml deleted file mode 100644 index 6511cb5185492..0000000000000 --- a/docs/changelog/107054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107054 -summary: Query API Keys support for both `aggs` and `aggregations` keywords -area: Security -type: enhancement -issues: - - 106839 diff --git a/docs/changelog/107059.yaml b/docs/changelog/107059.yaml deleted file mode 100644 index 6c7ee48f9b53b..0000000000000 --- a/docs/changelog/107059.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107059 -summary: "[Connector API] Support numeric for configuration select option value type" -area: Application -type: bug -issues: [] diff --git 
a/docs/changelog/107107.yaml b/docs/changelog/107107.yaml new file mode 100644 index 0000000000000..5ca611befeb5d --- /dev/null +++ b/docs/changelog/107107.yaml @@ -0,0 +1,5 @@ +pr: 107107 +summary: Increase KDF iteration count in `KeyStoreWrapper` +area: Infra/CLI +type: enhancement +issues: [] diff --git a/docs/changelog/107122.yaml b/docs/changelog/107122.yaml new file mode 100644 index 0000000000000..e227bfd45b939 --- /dev/null +++ b/docs/changelog/107122.yaml @@ -0,0 +1,5 @@ +pr: 107122 +summary: Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/107129.yaml b/docs/changelog/107129.yaml new file mode 100644 index 0000000000000..6c9b9094962c1 --- /dev/null +++ b/docs/changelog/107129.yaml @@ -0,0 +1,5 @@ +pr: 107129 +summary: Track ongoing search tasks +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/107131.yaml b/docs/changelog/107131.yaml new file mode 100644 index 0000000000000..ebb696931777b --- /dev/null +++ b/docs/changelog/107131.yaml @@ -0,0 +1,6 @@ +pr: 107131 +summary: "ESQL: Fix bug when combining projections" +area: ES|QL +type: bug +issues: + - 107083 diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml new file mode 100644 index 0000000000000..9589fe7e7264b --- /dev/null +++ b/docs/changelog/107158.yaml @@ -0,0 +1,5 @@ +pr: 107158 +summary: "ESQL: allow sorting by expressions and not only regular fields" +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/107183.yaml b/docs/changelog/107183.yaml new file mode 100644 index 0000000000000..226d036456858 --- /dev/null +++ b/docs/changelog/107183.yaml @@ -0,0 +1,5 @@ +pr: 107183 +summary: ES|QL fix no-length substring with supplementary (4-byte) character +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6d8a8f748fa0e..bdd3e166c22d6 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1821,7 +1821,8 @@ The API returns the following response: "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit" : "0b", "limit_in_bytes": 0 diff --git a/docs/reference/data-streams/data-streams.asciidoc b/docs/reference/data-streams/data-streams.asciidoc index 307930d64c4fb..9c7137563caef 100644 --- a/docs/reference/data-streams/data-streams.asciidoc +++ b/docs/reference/data-streams/data-streams.asciidoc @@ -18,6 +18,28 @@ automate the management of these backing indices. For example, you can use hardware and delete unneeded indices. {ilm-init} can help you reduce costs and overhead as your data grows. + +[discrete] +[[should-you-use-a-data-stream]] +== Should you use a data stream? + +To determine whether you should use a data stream for your data, you should consider the format of +the data, and your expected interaction. A good candidate for using a data stream will match the +following criteria: + +* Your data contains a timestamp field, or one could be automatically generated. +* You mostly perform indexing requests, with occasional updates and deletes. +* You index documents without an `_id`, or when indexing documents with an explicit `_id` you expect first-write-wins behavior. + +For most time series data use-cases, a data stream will be a good fit. 
However, if you find that +your data doesn't fit into these categories (for example, if you frequently send multiple documents +using the same `_id` expecting last-write-wins), you may want to use an index alias with a write +index instead. See documentation for <> for more information. + +Keep in mind that some features such as <> and +<> require a data stream. + [discrete] [[backing-indices]] == Backing indices @@ -116,19 +138,19 @@ You should not derive any intelligence from the backing indices names. [discrete] [[data-streams-append-only]] -== Append-only +== Append-only (mostly) -Data streams are designed for use cases where existing data is rarely, -if ever, updated. You cannot send update or deletion requests for existing -documents directly to a data stream. Instead, use the +Data streams are designed for use cases where existing data is rarely updated. You cannot send +update or deletion requests for existing documents directly to a data stream. However, you can still +<> in a data stream by submitting +requests directly to the document's backing index. + +If you need to update a larger number of documents in a data stream, you can use the <> and <> APIs. -If needed, you can <> by submitting requests directly to the document's backing index. - -TIP: If you frequently update or delete existing time series data, use an index -alias with a write index instead of a data stream. See +TIP: If you frequently send multiple documents using the same `_id` expecting last-write-wins, you +may want to use an index alias with a write index instead. See <>. include::set-up-a-data-stream.asciidoc[] diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index a055c278b41d9..1a32e64cedb1f 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,9 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. -+ -This parameter is only returned for successful operations. +`updated`. Other valid values are `noop` and `not_found`. `_shards`:: (object) diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc index a1ad512fbe512..ddc077f3b8ff8 100644 --- a/docs/reference/esql/esql-functions-operators.asciidoc +++ b/docs/reference/esql/esql-functions-operators.asciidoc @@ -1,40 +1,71 @@ [[esql-functions-operators]] === {esql} functions and operators - ++++ Functions and operators ++++ {esql} provides a comprehensive set of functions and operators for working with data. 
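To give a feel for how functions and operators compose, here is a small illustrative query — a sketch only, assuming the `employees` example dataset used throughout this reference:

[source,esql]
----
FROM employees
| WHERE emp_no > 10042                                  // comparison operator
| EVAL full_name = CONCAT(first_name, " ", last_name)   // string function
| LIMIT 5
----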
-The functions are divided into the following categories: +The reference documentation is divided into the following categories: [[esql-functions]] -<>:: +==== Functions overview + +.*Aggregate functions* +[%collapsible] +==== include::functions/aggregation-functions.asciidoc[tag=agg_list] +==== -<>:: +.*Math functions* +[%collapsible] +==== include::functions/math-functions.asciidoc[tag=math_list] +==== -<>:: +.*String functions* +[%collapsible] +==== include::functions/string-functions.asciidoc[tag=string_list] +==== -<>:: +.*Date and time functions* +[%collapsible] +==== include::functions/date-time-functions.asciidoc[tag=date_list] +==== -<>:: +.*Spatial functions* +[%collapsible] +==== include::functions/spatial-functions.asciidoc[tag=spatial_list] +==== -<>:: +.*Type conversion functions* +[%collapsible] +==== include::functions/type-conversion-functions.asciidoc[tag=type_list] +==== -<>:: +.*Conditional functions and expressions* +[%collapsible] +==== include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list] +==== -<>:: +.*Multi value functions* +[%collapsible] +==== include::functions/mv-functions.asciidoc[tag=mv_list] +==== + +[[esql-operators-overview]] +==== Operators overview -<>:: +.*Operators* +[%collapsible] +==== include::functions/operators.asciidoc[tag=op_list] +==== include::functions/aggregation-functions.asciidoc[] include::functions/math-functions.asciidoc[] diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index e4c873457b21b..77f5e79753fdd 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,11 +1,10 @@ [[esql-language]] -== Learning {esql} - +== {esql} reference ++++ -Learning {esql} +{esql} reference ++++ -Detailed information about the {esql} language: +Detailed reference documentation for the {esql} language: * <> * <> diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index e9d6628c63894..c1cd36e376a1c 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -22,5 +22,5 @@ include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] -include::date_trunc.asciidoc[] +include::layout/date_trunc.asciidoc[] include::now.asciidoc[] diff --git a/docs/reference/esql/functions/description/date_trunc.asciidoc b/docs/reference/esql/functions/description/date_trunc.asciidoc new file mode 100644 index 0000000000000..1fb874e3bd9cd --- /dev/null +++ b/docs/reference/esql/functions/description/date_trunc.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds down a date to the closest interval. diff --git a/docs/reference/esql/functions/description/locate.asciidoc b/docs/reference/esql/functions/description/locate.asciidoc new file mode 100644 index 0000000000000..60a6d435e37b6 --- /dev/null +++ b/docs/reference/esql/functions/description/locate.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Description* + +Returns an integer that indicates the position of a keyword substring within another string diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index ed79fe3d9c1f3..678fde7f5d98b 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry contains the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc new file mode 100644 index 0000000000000..95ab02a39614a --- /dev/null +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the two geometries or geometry columns are disjoint. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index 3a36d79cbd123..b736ba29a6c8b 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the two geometries or geometry columns intersect. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index be52db3f694bf..890f28cb769b0 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry is within the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/examples/date_trunc.asciidoc similarity index 68% rename from docs/reference/esql/functions/date_trunc.asciidoc rename to docs/reference/esql/functions/examples/date_trunc.asciidoc index 4aa228dc14e65..d7cece9aff58b 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/examples/date_trunc.asciidoc @@ -1,26 +1,4 @@ -[discrete] -[[esql-date_trunc]] -=== `DATE_TRUNC` - -*Syntax* - -[source,esql] ----- -DATE_TRUNC(interval, date) ----- - -*Parameters* - -`interval`:: -Interval, expressed using the <>. If `null`, the function returns `null`. - -`date`:: -Date expression. If `null`, the function returns `null`. - -*Description* - -Rounds down a date to the closest interval. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* @@ -32,10 +10,8 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] |=== - Combine `DATE_TRUNC` with <> to create date histograms. 
For example, the number of hires per year: - [source.merge.styled,esql] ---- include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] @@ -44,9 +20,7 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] |=== - Or an hourly error rate: - [source.merge.styled,esql] ---- include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] @@ -55,3 +29,4 @@ include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] |=== include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] |=== + diff --git a/docs/reference/esql/functions/examples/st_disjoint.asciidoc b/docs/reference/esql/functions/examples/st_disjoint.asciidoc new file mode 100644 index 0000000000000..192553e528a24 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_disjoint.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/layout/date_trunc.asciidoc b/docs/reference/esql/functions/layout/date_trunc.asciidoc new file mode 100644 index 0000000000000..0bd9ce4b4dbe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_trunc.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_trunc]] +=== `DATE_TRUNC` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_trunc.svg[Embedded,opts=inline] + +include::../parameters/date_trunc.asciidoc[] +include::../description/date_trunc.asciidoc[] +include::../types/date_trunc.asciidoc[] +include::../examples/date_trunc.asciidoc[] diff --git a/docs/reference/esql/functions/layout/locate.asciidoc b/docs/reference/esql/functions/layout/locate.asciidoc new file mode 100644 index 0000000000000..1017c7f844dd6 --- /dev/null +++ b/docs/reference/esql/functions/layout/locate.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-locate]] +=== `LOCATE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/locate.svg[Embedded,opts=inline] + +include::../parameters/locate.asciidoc[] +include::../description/locate.asciidoc[] +include::../types/locate.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_disjoint.asciidoc b/docs/reference/esql/functions/layout/st_disjoint.asciidoc new file mode 100644 index 0000000000000..a1eef41006f3e --- /dev/null +++ b/docs/reference/esql/functions/layout/st_disjoint.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +include::../parameters/st_disjoint.asciidoc[] +include::../description/st_disjoint.asciidoc[] +include::../types/st_disjoint.asciidoc[] +include::../examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_trunc.asciidoc b/docs/reference/esql/functions/parameters/date_trunc.asciidoc new file mode 100644 index 0000000000000..19f7cb6cd7c74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_trunc.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`interval`:: +Interval; expressed using the timespan literal syntax. + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/parameters/locate.asciidoc b/docs/reference/esql/functions/parameters/locate.asciidoc new file mode 100644 index 0000000000000..e48a7a891712c --- /dev/null +++ b/docs/reference/esql/functions/parameters/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +An input string + +`substring`:: +A substring to locate in the input string + +`start`:: +The start index diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc new file mode 100644 index 0000000000000..e87a0d0eb94f0 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/signature/date_trunc.svg b/docs/reference/esql/functions/signature/date_trunc.svg new file mode 100644 index 0000000000000..c82cd04ed5c88 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_trunc.svg @@ -0,0 +1 @@ +DATE_TRUNC(interval,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/locate.svg b/docs/reference/esql/functions/signature/locate.svg new file mode 100644 index 0000000000000..2b7bc2dac0e86 --- /dev/null +++ b/docs/reference/esql/functions/signature/locate.svg @@ -0,0 +1 @@ +LOCATE(string,substring,start) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/st_disjoint.svg b/docs/reference/esql/functions/signature/st_disjoint.svg new file mode 100644 index 0000000000000..becd0be37e441 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_disjoint.svg @@ -0,0 +1 @@ +ST_DISJOINT(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index 739d6b2d6f58f..b6d178ddd624d 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -9,6 +9,7 @@ // tag::spatial_list[] * experimental:[] <> +* experimental:[] <> * experimental:[] <> * experimental:[] <> * experimental:[] <> @@ -16,6 +17,7 @@ // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_disjoint.asciidoc[] include::st_contains.asciidoc[] include::st_within.asciidoc[] include::st_x.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc index 07b1a11aa7247..110c4fe4ca9ec 100644 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_contains.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. include::types/st_contains.asciidoc[] include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc new file mode 100644 index 0000000000000..db89ca186a0ff --- /dev/null +++ b/docs/reference/esql/functions/st_disjoint.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +include::description/st_disjoint.asciidoc[] +This is the inverse of the <> function. 
+In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅
+
+include::types/st_disjoint.asciidoc[]
+include::examples/st_disjoint.asciidoc[]
diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc
index fbe313d10b0e7..d75a7f3a50e0f 100644
--- a/docs/reference/esql/functions/st_intersects.asciidoc
+++ b/docs/reference/esql/functions/st_intersects.asciidoc
@@ -24,6 +24,7 @@ This means it is not possible to combine `geo_*` and `cartesian_*` parameters.
 
 Returns true if two geometries intersect.
 They intersect if they have any point in common, including their interior points (points along lines or within polygons).
+This is the inverse of the <> function.
 In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅
 
 include::types/st_intersects.asciidoc[]
diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc
index 64adb91219c4a..0f0190a9de638 100644
--- a/docs/reference/esql/functions/st_within.asciidoc
+++ b/docs/reference/esql/functions/st_within.asciidoc
@@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first.
 This means it is not possible to combine `geo_*` and `cartesian_*` parameters.
 
 include::description/st_within.asciidoc[]
-This is the inverse of the `<>` function.
+This is the inverse of the <> function.
 
 include::types/st_within.asciidoc[]
 include::examples/st_within.asciidoc[]
diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc
index 535e2df29c353..cfe74ae25c3d0 100644
--- a/docs/reference/esql/functions/types/auto_bucket.asciidoc
+++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc
@@ -5,5 +5,40 @@
 [%header.monospaced.styled,format=dsv,separator=|]
 |===
 field | buckets | from | to | result
-
+datetime | integer | datetime | datetime | datetime
+datetime | integer | datetime | keyword | datetime
+datetime | integer | datetime | text | datetime
+datetime | integer | keyword | datetime | datetime
+datetime | integer | keyword | keyword | datetime
+datetime | integer | keyword | text | datetime
+datetime | integer | text | datetime | datetime
+datetime | integer | text | keyword | datetime
+datetime | integer | text | text | datetime
+double | integer | double | double | double
+double | integer | double | integer | double
+double | integer | double | long | double
+double | integer | integer | double | double
+double | integer | integer | integer | double
+double | integer | integer | long | double
+double | integer | long | double | double
+double | integer | long | integer | double
+double | integer | long | long | double
+integer | integer | double | double | double
+integer | integer | double | integer | double
+integer | integer | double | long | double
+integer | integer | integer | double | double
+integer | integer | integer | integer | double
+integer | integer | integer | long | double
+integer | integer | long | double | double
+integer | integer | long | integer | double
+integer | integer | long | long | double
+long | integer | double | double | double
+long | integer | double | integer | double
+long | integer | double | long | double
+long | integer | integer | double | double
+long | integer | integer | integer | double
+long | integer | integer | long | double
+long | integer | long | double | double
+long | integer | long | integer | double
+long | integer | long | long | double
 |===
diff --git 
a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc
new file mode 100644
index 0000000000000..8df45cfef54a8
--- /dev/null
+++ b/docs/reference/esql/functions/types/date_trunc.asciidoc
@@ -0,0 +1,10 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+interval | date | result
+date_period | datetime | datetime
+time_duration | datetime | datetime
+|===
diff --git a/docs/reference/esql/functions/types/locate.asciidoc b/docs/reference/esql/functions/types/locate.asciidoc
new file mode 100644
index 0000000000000..895dce1335813
--- /dev/null
+++ b/docs/reference/esql/functions/types/locate.asciidoc
@@ -0,0 +1,12 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+string | substring | start | result
+keyword | keyword | integer | integer
+keyword | text | integer | integer
+text | keyword | integer | integer
+text | text | integer | integer
+|===
diff --git a/docs/reference/esql/functions/types/st_disjoint.asciidoc b/docs/reference/esql/functions/types/st_disjoint.asciidoc
new file mode 100644
index 0000000000000..36bd9cc036ade
--- /dev/null
+++ b/docs/reference/esql/functions/types/st_disjoint.asciidoc
@@ -0,0 +1,16 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+geomA | geomB | result
+cartesian_point | cartesian_point | boolean
+cartesian_point | cartesian_shape | boolean
+cartesian_shape | cartesian_point | boolean
+cartesian_shape | cartesian_shape | boolean
+geo_point | geo_point | boolean
+geo_point | geo_shape | boolean
+geo_shape | geo_point | boolean
+geo_shape | geo_shape | boolean
+|===
diff --git a/docs/reference/ilm/ilm-tutorial.asciidoc b/docs/reference/ilm/ilm-tutorial.asciidoc
index c7f2c3537b5e8..4513c523056a9 100644
--- a/docs/reference/ilm/ilm-tutorial.asciidoc
+++ b/docs/reference/ilm/ilm-tutorial.asciidoc
@@ -282,14 +282,15 @@ DELETE /_index_template/timeseries_template
 [[manage-time-series-data-without-data-streams]]
 === Manage time series data without data streams
 
-Even though <> are a convenient way to scale
-and manage time series data, they are designed to be append-only. We recognise there
-might be use-cases where data needs to be updated or deleted in place and the
-data streams don't support delete and update requests directly,
-so the index APIs would need to be used directly on the data stream's backing indices.
-
-In these cases, you can use an index alias to manage indices containing the time series data
-and periodically roll over to a new index.
+Even though <> are a convenient way to scale and manage time series
+data, they are designed to be append-only. We recognise there might be use-cases where data needs to
+be updated or deleted in place and the data streams don't support delete and update requests
+directly, so the index APIs would need to be used directly on the data stream's backing indices. In
+these cases we still recommend using a data stream.
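For example — a sketch only, assuming an existing data stream named `my-data-stream` with a `user.id` field — an occasional in-place update can go through the update-by-query API instead of a direct update request:

[source,console]
----
POST my-data-stream/_update_by_query
{
  "query": { "term": { "user.id": "kimchy" } },
  "script": {
    "source": "ctx._source.user.id = params.new_id",
    "params": { "new_id": "kimchy2" },
    "lang": "painless"
  }
}
----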
+ +If you frequently send multiple documents using the same `_id` expecting last-write-wins, you can +use an index alias instead of a data stream to manage indices containing the time series data and +periodically roll over to a new index. To automate rollover and management of time series indices with {ilm-init} using an index alias, you: diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 79be6205a8c88..b6310050a4f25 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -81,6 +81,8 @@ To use a policy that triggers the rollover action, you need to configure the policy in the index template used to create each new index. You specify the name of the policy and the alias used to reference the rolling indices. +TIP: An `index.lifecycle.rollover_alias` setting is only required if using {ilm} with an alias. It is unnecessary when using <>. + You can use the {kib} Create template wizard to create a template. To access the wizard, open the menu and go to *Stack Management > Index Management*. In the *Index Templates* tab, click *Create template*. @@ -128,8 +130,9 @@ DELETE _index_template/my_template [[create-initial-index]] ==== Create an initial managed index -When you set up policies for your own rolling indices, you need to manually create the first index -managed by a policy and designate it as the write index. +When you set up policies for your own rolling indices, if you are not using the recommended +<>, you need to manually create the first index managed by a policy and +designate it as the write index. IMPORTANT: When you enable {ilm} for {beats} or the {ls} {es} output plugin, the necessary policies and configuration changes are applied automatically. diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 87a865b9487e5..6df1993175a0d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -18,7 +18,7 @@ or if you want to use non-NLP models, use the <>. [[put-inference-api-request]] ==== {api-request-title} -`PUT /_inference//` +`PUT /_inference//` [discrete] diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c9e726d940b1d..dca10671e57bc 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,14 +16,17 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -The following changes in {es} 8.13 might affect your applications -and prevent them from operating normally. -Before upgrading to 8.13, review these changes and take the described steps -to mitigate the impact. +There are no breaking changes in 8.13. - -There are no notable breaking changes in {es} 8.13. -But there are some less critical breaking changes. +[discrete] +[[migrate-notable-changes-8.13]] +=== Notable changes +The following are notable, non-breaking updates to be aware of: + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs. 
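As a concrete sketch of the rollover bootstrap described in the lifecycle-policy section above — the policy name `my_policy` and alias `my-alias` here are placeholders — the first managed index can set the rollover alias and be designated the write index in one request:

[source,console]
----
PUT my-index-000001
{
  "settings": {
    "index.lifecycle.name": "my_policy",
    "index.lifecycle.rollover_alias": "my-alias"
  },
  "aliases": {
    "my-alias": {
      "is_write_index": true
    }
  }
}
----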
[discrete] [[breaking_813_index_setting_changes]] diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 1ab5de76a94b0..e4e10e2ae2fc5 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -537,4 +537,4 @@ The API returns the following results: // TESTRESPONSE[s/"job_version" : "8.4.0"/"job_version" : $body.job_version/] // TESTRESPONSE[s/1656087283340/$body.$_path/] // TESTRESPONSE[s/"superuser"/"_es_test_root"/] -// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"false"/] +// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"exclude"/] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index e3c8da281f2a1..05c97d51a38e7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,8 @@ This section summarizes the changes in each release. * <> +* <> +* <> * <> * <> * <> @@ -63,6 +65,8 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.2.asciidoc[] +include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] include::release-notes/8.12.1.asciidoc[] diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 47855773d0543..99ee4e5fb86e1 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -7,6 +7,9 @@ Also see <>. [float] === Known issues +* Due to a bug in the bundled JDK 22 nodes might crash abruptly under high memory pressure. + We recommend <> asap to mitigate the issue. + * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880]) + This affects clusters running version 8.10 or later, with an active downsampling diff --git a/docs/reference/release-notes/8.13.1.asciidoc b/docs/reference/release-notes/8.13.1.asciidoc new file mode 100644 index 0000000000000..9f5f34d27eb79 --- /dev/null +++ b/docs/reference/release-notes/8.13.1.asciidoc @@ -0,0 +1,33 @@ +[[release-notes-8.13.1]] +== {es} version 8.13.1 + +Also see <>. 
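One quick way to check whether a node is exposed to the bundled JDK 22 issue called out in the 8.13.0 known issues above is the nodes info API, which reports `jvm.version` and `jvm.using_bundled_jdk` per node; the `filter_path` below is just one convenient slice of that response:

[source,console]
----
GET _nodes?filter_path=nodes.*.jvm.version,nodes.*.jvm.using_bundled_jdk
----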
+ +[[bug-8.13.1]] +[float] +=== Bug fixes + +Aggregations:: +* Add test to exercise reduction of terms aggregation order by key {es-pull}106799[#106799] + +Downsampling:: +* Gate reading of optional string array for bwc {es-pull}106878[#106878] + +Machine Learning:: +* Fix Array out of bounds exception in the XLM Roberta tokenizer {es-pull}106655[#106655] + +Search:: +* Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` {es-pull}106678[#106678] (issue: {es-issue}105911[#105911]) +* Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set {es-pull}106564[#106564] + +Transform:: +* Fail checkpoint on missing clusters {es-pull}106793[#106793] (issues: {es-issue}104533[#104533], {es-issue}106790[#106790]) + +[[enhancement-8.13.1]] +[float] +=== Enhancements + +Transform:: +* Raise loglevel of events related to transform lifecycle from DEBUG to INFO {es-pull}106602[#106602] + + diff --git a/docs/reference/release-notes/8.13.2.asciidoc b/docs/reference/release-notes/8.13.2.asciidoc new file mode 100644 index 0000000000000..1da23b5125833 --- /dev/null +++ b/docs/reference/release-notes/8.13.2.asciidoc @@ -0,0 +1,31 @@ +[[release-notes-8.13.2]] +== {es} version 8.13.2 + +Also see <>. + +[[bug-8.13.2]] +[float] +=== Bug fixes + +Aggregations:: +* Address concurrency issue in top hits aggregation {es-pull}106990[#106990] + +Application:: +* [Connector API] Support numeric for configuration select option value type {es-pull}107059[#107059] + +Downsampling:: +* Fix a downsample persistent task assignment bug {es-pull}106247[#106247] +* Fix downsample action request serialization {es-pull}106920[#106920] + +ES|QL:: +* ESQL: Fix fully pruned aggregates {es-pull}106673[#106673] (issue: {es-issue}106427[#106427]) + +[[enhancement-8.13.2]] +[float] +=== Enhancements + +Security:: +* Query API Key Information API support for the `typed_keys` request parameter {es-pull}106873[#106873] (issue: {es-issue}106817[#106817]) +* Query API Keys support for both `aggs` and `aggregations` keywords {es-pull}107054[#107054] (issue: {es-issue}106839[#106839]) + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 25096779521e4..8d9d743a239f5 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -31,46 +31,20 @@ endif::[] // tag::notable-highlights[] [discrete] -[[improve_storage_efficiency_for_non_metric_fields_in_tsdb]] -=== Improve storage efficiency for non-metric fields in TSDB -Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. -While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, -there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). -In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. -This means that for each time series, we are storing long consecutive runs of the same ordinal. -With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), -and runs of cycling values (such as `1 2 1 2 …`). -In our testing, we have seen a reduction in storage size by about 13%. -The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. 
-The more non-metric fields, the more effective this improvement will be. +[[add_global_retention_in_data_stream_lifecycle]] +=== Add global retention in data stream lifecycle +Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention +allows us to configure two different retentions: -{es-pull}99747[#99747] +- `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention +defined on the data stream level. +- `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream +data to be deleted after the `max_retention` has passed. -[discrete] -[[ga_release_of_synonyms_api]] -=== GA Release of Synonyms API -Removes the beta label for the Synonyms API to make it GA. - -{es-pull}103223[#103223] - -[discrete] -[[flag_in_field_caps_to_return_only_fields_with_values_in_index]] -=== Flag in `_field_caps` to return only fields with values in index -We added support for filtering the field capabilities API output by removing -fields that don't have a value. This can be done through the newly added -`include_empty_fields` parameter, which defaults to true. - -{es-pull}103651[#103651] - -[discrete] -[[new_lucene_9_10_release]] -=== New Lucene 9.10 release -- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. -- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search -- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. -- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. +Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data +stream considering all the available retention configurations. 
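To make the three retention notions in this highlight concrete, the sketch below derives an effective retention from the per-stream value and the two cluster-level settings. This is an illustration of the documented semantics only, not code from this change; the method shape and the use of `TimeValue` to represent a retention period are assumptions for the example.

[source,java]
----
import org.elasticsearch.core.TimeValue;

public final class EffectiveRetentionSketch {

    /**
     * Illustrative only: a data stream's own retention takes precedence over
     * default_retention, and max_retention caps whichever value was chosen.
     */
    static TimeValue effectiveRetention(
        TimeValue dataStreamRetention, // retention set on the data stream itself, may be null
        TimeValue defaultRetention,    // cluster-level default_retention, may be null
        TimeValue maxRetention         // cluster-level max_retention, may be null
    ) {
        // default_retention only applies when the data stream defines no retention of its own
        TimeValue chosen = dataStreamRetention != null ? dataStreamRetention : defaultRetention;
        if (maxRetention != null && (chosen == null || chosen.compareTo(maxRetention) > 0)) {
            return maxRetention; // data older than max_retention may always be deleted
        }
        return chosen; // null means no retention applies at any level
    }
}
----

For example, under these semantics a cluster with `default_retention` of 30 days and `max_retention` of 90 days gives an unconfigured data stream an effective retention of 30 days, while a data stream asking for 180 days would be capped at 90.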
-{es-pull}105578[#105578] +{es-pull}105682[#105682] // end::notable-highlights[] diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 1d6df60df0f88..6191f33f1c5dd 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -251,7 +251,7 @@ public void reenableWatcher() throws Exception { if (isWatcherTest()) { assertBusy(() -> { ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": @@ -261,7 +261,7 @@ public void reenableWatcher() throws Exception { emptyList(), emptyMap() ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); + boolean isAcknowledged = startResponse.evaluate("acknowledged"); assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); case "stopping": diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 40ae43684d110..ba8b48355af17 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1694,25 +1694,25 @@ [dependency checksum entries replaced in this hunk; the XML element content did not survive extraction] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 6c06511ccfbd1..a0a391a0f019b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1791,7 +1791,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), - null + original.rolloverOnWrite(), + original.getAutoShardingEvent() ) brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index a6b235e8d566f..1d8de6b9ac5f6 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -75,7 +75,7 @@ public void testGetIndexApi() throws IOException { assertThat(indices.containsKey(failureStoreIndex), is(true)); } { - final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=false")); + final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=exclude")); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(1)); assertThat(indices.containsKey(backingIndex), is(true)); @@ -98,7 +98,7 @@ public void testGetIndexStatsApi() throws IOException { } { final Response statsResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME +
"/_stats?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_stats?failure_store=include") ); Map indices = (Map) entityAsMap(statsResponse).get("indices"); assertThat(indices.size(), is(2)); @@ -124,7 +124,7 @@ public void testGetIndexSettingsApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ -150,7 +150,7 @@ public void testGetIndexMappingApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ -183,7 +183,7 @@ public void testPutIndexMappingApi() throws IOException { assertAcknowledged(client().performRequest(mappingRequest)); } { - final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true"); + final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include"); mappingRequest.setJsonEntity(""" { "properties": { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 01ad1bb09b20f..11446a2a2a761 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -315,7 +315,8 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), - null + ds.rolloverOnWrite(), + ds.getAutoShardingEvent() ) ); Metadata metadata = mb.build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index abd5132edde16..1c63deadf92a4 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -154,7 +154,8 @@ public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { d.getLifecycle(), d.isFailureStore(), d.getFailureIndices(), - null + false, + d.getAutoShardingEvent() ) ) .build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 2118c98b377bc..9fc646995bc0e 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -89,6 +89,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(), true, failureStores, + false, null ); @@ -199,6 +200,7 @@ public void 
testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(null, null, false), true, failureStores, + false, null ); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index d0456d669663d..a67fa72cb3079 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -296,7 +296,8 @@ public void testRetentionNotExecutedForTSIndicesWithinTimeBounds() { DataStreamLifecycle.newBuilder().dataRetention(0L).build(), dataStream.isFailureStore(), dataStream.getFailureIndices(), - null + dataStream.rolloverOnWrite(), + dataStream.getAutoShardingEvent() ) ); clusterState = ClusterState.builder(clusterState).metadata(builder).build(); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 82c757fc4af76..46d46e8291ae9 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -32,7 +32,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } @@ -67,7 +67,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 @@ -96,7 +96,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index b867fcfb905ea..11d060002955a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.AbstractRefCounted; @@ -38,16 +37,6 @@ public static class Item implements Writeable { private final SearchTemplateResponse response; private final Exception exception; - private Item(StreamInput in) throws IOException { - if (in.readBoolean()) { - this.response = new SearchTemplateResponse(in); - this.exception = null; - } else { - exception = in.readException(); - this.response = null; - } - } - public Item(SearchTemplateResponse response, Exception exception) { this.response = response; this.exception = exception; @@ 
-114,16 +103,6 @@ protected void closeInternal() { } }); - MultiSearchTemplateResponse(StreamInput in) throws IOException { - super(in); - items = in.readArray(Item::new, Item[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_0_0)) { - tookInMillis = in.readVLong(); - } else { - tookInMillis = -1L; - } - } - MultiSearchTemplateResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index c698a603055ad..8c1a410ee8a66 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -67,7 +67,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestSearchTemplateAction(namedWriteableRegistry, clusterSupportsFeature), + new RestSearchTemplateAction(clusterSupportsFeature), new RestMultiSearchTemplateAction(settings), new RestRenderSearchTemplateAction() ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index a29c10b7501f1..ab1f90bc59933 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -36,11 +35,9 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchTemplateAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { - this.namedWriteableRegistry = namedWriteableRegistry; + public RestSearchTemplateAction(Predicate clusterSupportsFeature) { this.clusterSupportsFeature = clusterSupportsFeature; } @@ -73,7 +70,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchRequest, request, null, - namedWriteableRegistry, clusterSupportsFeature, size -> searchRequest.source().size(size) ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 39da4066a7859..2e62f6e9c96f4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.AbstractRefCounted; @@ -46,12 +45,6 @@ protected void closeInternal() { SearchTemplateResponse() {} - SearchTemplateResponse(StreamInput in) throws IOException { - super(in); - source = in.readOptionalBytesReference(); - response = in.readOptionalWriteable(SearchResponse::new); - } - public BytesReference getSource() { return source; } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 1efa0ada221ef..1f1955e5ca171 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -28,7 +27,7 @@ public final class RestSearchTemplateActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestSearchTemplateAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index 6d88ff1e8db6a..3c183830afa6d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -108,12 +108,6 @@ public Response(List scriptContextNames, PainlessContextInfo painlessCon this.painlessContextInfo = painlessContextInfo; } - public Response(StreamInput in) throws IOException { - super(in); - scriptContextNames = in.readStringCollectionAsList(); - painlessContextInfo = in.readOptionalWriteable(PainlessContextInfo::new); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(scriptContextNames); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 7f5f1fe4f84ea..6ab5fc724c711 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -462,7 +462,7 @@ static boolean needDocumentAndIndex(ScriptContext scriptContext) { public static class Response extends ActionResponse implements ToXContentObject { - private Object result; + private final Object result; Response(Object result) { this.result = result; diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java 
b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index e2b932b01a516..09507ae926f44 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -340,7 +340,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); return new ScaledFloatIndexFieldData(scaledValues, scalingFactor, ScaledFloatDocValuesField::new); }; @@ -608,6 +609,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; // We don't know how to take advantage of the index with half floats anyway + } + @Override public NumericType getNumericType() { /* diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java index b9ca544e7532d..9f559c8f55858 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java @@ -49,7 +49,11 @@ public void testBasics() throws Exception { .endObject() ); - Mapping parsedMapping = createMapperService(mapping).parseMapping("type", new CompressedXContent(mapping)); + Mapping parsedMapping = createMapperService(mapping).parseMapping( + "type", + MapperService.MergeReason.MAPPING_UPDATE, + new CompressedXContent(mapping) + ); assertEquals(mapping, parsedMapping.toCompressedXContent().toString()); assertNotNull(parsedMapping.getMetadataMapperByClass(RankFeatureMetaFieldMapper.class)); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 46b9e365fd0ea..4adc7f9b5ba27 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.TestDocumentParserContext; @@ -206,7 +207,7 @@ public void init() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(mapper), MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { @@ -223,7 +224,7 @@ private void addQueryFieldMappings() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); fieldType = 
(PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -699,7 +700,7 @@ public void testAllowNoAdditionalSettings() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, () -> indexServiceWithoutSettings.mapperService() - .merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + .merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } @@ -722,7 +723,7 @@ public void testMultiplePercolatorFields() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -763,7 +764,7 @@ public void testNestedPercolatorField() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -912,7 +913,7 @@ public void testEmptyName() throws Exception { ); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("type1", new CompressedXContent(mapping)) + () -> mapperService.parseMapping("type1", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("field name cannot be an empty string")); } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 061d8292b3e5f..fe6da7fe1ce68 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -30,11 +30,11 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject { /** The overall evaluation result. 
*/ - private double metricScore; + private final double metricScore; /** details about individual ranking evaluation queries, keyed by their id */ - private Map details; + private final Map details; /** exceptions for specific ranking evaluation queries, keyed by their id */ - private Map failures; + private final Map failures; public RankEvalResponse(double metricScore, Map partialResults, Map failures) { this.metricScore = metricScore; diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java index 48c50450656f3..6643e2e9d20ea 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; @@ -39,15 +38,10 @@ protected AbstractBaseReindexRestHandler(A action) { this.action = action; } - protected RestChannelConsumer doPrepareRequest( - RestRequest request, - NamedWriteableRegistry namedWriteableRegistry, - NodeClient client, - boolean includeCreated, - boolean includeUpdated - ) throws IOException { + protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated) + throws IOException { // Build the internal request - Request internal = setCommonOptions(request, buildRequest(request, namedWriteableRegistry)); + Request internal = setCommonOptions(request, buildRequest(request)); // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { @@ -78,7 +72,7 @@ protected RestChannelConsumer doPrepareRequest( /** * Build the Request based on the RestRequest. */ - protected abstract Request buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException; + protected abstract Request buildRequest(RestRequest request) throws IOException; /** * Sets common options of {@link AbstractBulkByScrollRequest} requests. 
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java index cb0d09f1f2450..8cf7d2200ad36 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; @@ -45,7 +44,6 @@ protected AbstractBulkByQueryRestHandler(A action) { protected void parseInternalRequest( Request internal, RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, Map> bodyConsumers ) throws IOException { @@ -58,14 +56,7 @@ protected void parseInternalRequest( IntConsumer sizeConsumer = restRequest.getRestApiVersion() == RestApiVersion.V_7 ? size -> setMaxDocsFromSearchSize(internal, size) : size -> failOnSizeSpecified(); - RestSearchAction.parseSearchRequest( - searchRequest, - restRequest, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - sizeConsumer - ); + RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, clusterSupportsFeature, sizeConsumer); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java index 8cdfc77db6f7f..1a40f77250e5f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java @@ -76,9 +76,9 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestReindexAction(namedWriteableRegistry, clusterSupportsFeature), - new RestUpdateByQueryAction(namedWriteableRegistry, clusterSupportsFeature), - new RestDeleteByQueryAction(namedWriteableRegistry, clusterSupportsFeature), + new RestReindexAction(clusterSupportsFeature), + new RestUpdateByQueryAction(clusterSupportsFeature), + new RestDeleteByQueryAction(clusterSupportsFeature), new RestRethrottleAction(nodesInCluster) ); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java index cc98dc06575b8..ff0ef1282b2d9 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.DeleteByQueryAction; @@ -31,12 +30,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; 
private final Predicate clusterSupportsFeature; - public RestDeleteByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestDeleteByQueryAction(Predicate clusterSupportsFeature) { super(DeleteByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, false); + return doPrepareRequest(request, client, false, false); } @Override - protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException { /* * Passing the search request through DeleteByQueryRequest first allows * it to set its own defaults which differ from SearchRequest's @@ -74,7 +71,7 @@ protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("conflicts", o -> internal.setConflicts((String) o)); consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); return internal; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java index 253fd581cfceb..a693b0babaa9f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; @@ -34,12 +33,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestReindexAction extends AbstractBaseReindexRestHandler implements RestRequestFilter { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestReindexAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestReindexAction(Predicate clusterSupportsFeature) { super(ReindexAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -55,11 +52,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, true, true); + return doPrepareRequest(request, client, true, true); } @Override - protected ReindexRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected ReindexRequest buildRequest(RestRequest request) throws IOException { if (request.hasParam("pipeline")) { throw new IllegalArgumentException( "_reindex doesn't support [pipeline] as a query parameter. Specify it in the [dest] object instead." 
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 50536a164727a..2a6146b9fad1c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.UpdateByQueryAction; @@ -32,12 +31,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestUpdateByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestUpdateByQueryAction(Predicate clusterSupportsFeature) { super(UpdateByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, true); + return doPrepareRequest(request, client, false, true); } @Override - protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException { if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { request.param("type"); } @@ -78,7 +75,7 @@ protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("script", o -> internal.setScript(Script.parse(o))); consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); internal.setPipeline(request.param("pipeline")); return internal; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index aa457fae9e377..013eace19f1b5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestDeleteByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestDeleteByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestDeleteByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> 
mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java index ddb8c2ce0225d..2e1810482bb5f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -21,10 +20,8 @@ import org.junit.Before; import java.io.IOException; -import java.util.Collections; import static java.util.Collections.singletonMap; -import static org.mockito.Mockito.mock; public class RestReindexActionTests extends RestActionTestCase { @@ -32,7 +29,7 @@ public class RestReindexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestReindexAction(mock(NamedWriteableRegistry.class), nf -> false); + action = new RestReindexAction(nf -> false); controller().registerHandler(action); } @@ -56,10 +53,7 @@ public void testPipelineQueryParameterIsError() throws IOException { request.withContent(BytesReference.bytes(body), body.contentType()); } request.withParams(singletonMap("pipeline", "doesn't matter")); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> action.buildRequest(request.build(), new NamedWriteableRegistry(Collections.emptyList())) - ); + Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build())); assertEquals("_reindex doesn't support [pipeline] as a query parameter. 
Specify it in the [dest] object instead.", e.getMessage()); } @@ -68,14 +62,14 @@ public void testSetScrollTimeout() throws IOException { { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT, request.getScrollTime()); } { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(singletonMap("scroll", "10m")); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals("10m", request.getScrollTime().toString()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index a3f468df89e1e..b83f11a91d1b8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestUpdateByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestUpdateByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestUpdateByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index cf3bc21526bf6..13e582598a2d2 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -267,7 +267,7 @@ protected S3Repository createRepository( ) { return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, s3RepositoriesMetrics) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 50470ec499ef6..ff61504d6c525 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -132,7 +132,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { S3RepositoriesMetrics.NOOP ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 00abf1e77fd57..a02bff59988d8 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -43,7 +43,7 @@ private URLRepository createRepository(Settings baseSettings, RepositoryMetadata mock(URLHttpClient.Factory.class) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 08a133bcb69c8..44f52105f64c9 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -80,7 +80,7 @@ public String typeName() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new, isIndexed()); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 695f5d2a64bc7..0f210ee4b2450 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.hamcrest.Matchers; @@ -21,7 +20,6 @@ import static org.hamcrest.CoreMatchers.equalTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106933") public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { @@ -36,7 +34,8 @@ public void testHealthNode() throws Exception { assertThat(tasks, Matchers.containsString("health-node")); }); assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_health_report")); + String path = clusterHasFeature("health.supports_health_report_api") ? 
"_health_report" : "_internal/_health"; + Response response = client().performRequest(new Request("GET", path)); Map health_report = entityAsMap(response.getEntity()); assertThat(health_report.get("status"), equalTo("green")); }); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index e04786ec14cf7..299c24f987d8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -64,7 +64,7 @@ "default":"false", "description":"If set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. Only allowed on data streams." }, - "failure_store":{ + "target_failure_store":{ "type":"boolean", "description":"If set to true, the rollover action will be applied on the failure store of the data stream.", "visibility": "feature_flag", diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 784edfdac3469..407313a59c5e8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -147,3 +147,30 @@ setup: - match: { test_default_index_options.mappings.properties.vector.index: true } - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } +--- +"Default index options for dense_vector element type byte": + - skip: + version: ' - 8.13.99' + reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' + - do: + indices.create: + index: test_default_index_options + body: + mappings: + properties: + vector: + element_type: byte + type: dense_vector + dims: 5 + + - match: { acknowledged: true } + + - do: + indices.get_mapping: + index: test_default_index_options + + - match: { test_default_index_options.mappings.properties.vector.type: dense_vector } + - match: { test_default_index_options.mappings.properties.vector.dims: 5 } + - match: { test_default_index_options.mappings.properties.vector.index: true } + - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } + - is_false: test_default_index_options.mappings.properties.vector.index_options.type diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java index 206aa57bc84b3..da89f3252bec0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java @@ -301,7 +301,8 @@ public void testWriteCanBeRejectedAtCoordinatingLevel() throws Exception { public void testWriteCanBeRejectedAtPrimaryLevel() throws Exception { final BulkRequest bulkRequest = new BulkRequest(); long totalRequestSize = 0; - for (int i = 0; i < 80; ++i) { + int numberOfIndexRequests = randomIntBetween(50, 100); + for (int i = 0; i < numberOfIndexRequests; ++i) { IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID()) 
.source(Collections.singletonMap("key", randomAlphaOfLength(50))); totalRequestSize += request.ramBytesUsed(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 6cca0ccb3fdf3..97f052367fbc6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -164,11 +164,11 @@ public void testNodeIndexingMetricsArePublishing() throws Exception { ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); - var primaryOperationsRejectionsRatio = getRecordedMetric( + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.rejections.ratio" + "es.indexing.primary_operations.document.rejections.ratio" ); - assertThat(primaryOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + assertThat(primaryOperationsDocumentRejectionsRatio.getDouble(), equalTo(0.0)); }); @@ -207,13 +207,19 @@ public void testCoordinatingRejectionMetricsArePublishing() throws Exception { "es.indexing.coordinating_operations.rejections.total" ); assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + + var coordinatingOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.coordinating_operations.rejections.ratio" + ); + assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(1.0)); }); } - public void testPrimaryRejectionMetricsArePublishing() throws Exception { + public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception { // setting low Indexing Pressure limits to trigger primary rejections - final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB").build()); + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "2KB").build()); // setting high Indexing Pressure limits to pass coordinating checks final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "10MB").build() @@ -227,19 +233,32 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { plugin.resetMeter(); final int numberOfShards = randomIntBetween(1, 5); - assertAcked(prepareCreate("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-one", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-two", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); - final BulkRequest bulkRequest = new BulkRequest(); - final int batchCount = randomIntBetween(50, 100); - for (int i = 0; i < batchCount; i++) { - bulkRequest.add(new IndexRequest("test").source("field", randomAlphaOfLength(2048))); + final BulkRequest bulkRequestOne = new BulkRequest(); + final int batchCountOne = randomIntBetween(50, 100); + for (int i = 0; i < batchCountOne; i++) { + bulkRequestOne.add(new IndexRequest("test-one").source("field", randomAlphaOfLength(3096))); } - // big batch should pass thru coordinating limit check but fail on primary - // note the bulk request is sent to coordinating client - final BulkResponse bulkResponse = 
client(coordinatingNode).bulk(bulkRequest).actionGet(); - assertThat(bulkResponse.hasFailures(), equalTo(true)); - assertThat(Arrays.stream(bulkResponse.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), equalTo(true)); + final BulkRequest bulkRequestTwo = new BulkRequest(); + final int batchCountTwo = randomIntBetween(1, 5); + for (int i = 0; i < batchCountTwo; i++) { + bulkRequestTwo.add(new IndexRequest("test-two").source("field", randomAlphaOfLength(1))); + } + + // big batch should pass through coordinating gate but trip on primary gate + // note the bulk request is sent to coordinating node + final BulkResponse bulkResponseOne = client(coordinatingNode).bulk(bulkRequestOne).actionGet(); + assertThat(bulkResponseOne.hasFailures(), equalTo(true)); + assertThat( + Arrays.stream(bulkResponseOne.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), + equalTo(true) + ); + // small bulk request is expected to pass through primary indexing pressure gate + final BulkResponse bulkResponseTwo = client(coordinatingNode).bulk(bulkRequestTwo).actionGet(); + assertThat(bulkResponseTwo.hasFailures(), equalTo(false)); // simulate async apm `polling` call for metrics plugin.collect(); @@ -251,6 +270,16 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { "es.indexing.primary_operations.rejections.total" ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); + + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.primary_operations.document.rejections.ratio" + ); + // ratio of rejected documents vs all indexing documents + assertThat( + equals(primaryOperationsDocumentRejectionsRatio.getDouble(), (double) batchCountOne / (batchCountOne + batchCountTwo)), + equalTo(true) + ); }); } @@ -261,4 +290,9 @@ private static Measurement getRecordedMetric(Function> assertThat(measurements.size(), equalTo(1)); return measurements.get(0); } + + private static boolean equals(double expected, double actual) { + final double eps = .0000001; + return Math.abs(expected - actual) < eps; + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 7d902cf140839..b8d1d45a6f85d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.source; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.ValidationException; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; @@ -81,6 +82,11 @@ public void testWithRouting() { prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); + assertResponse(prepareSearch("test"), response -> { + assertThat(response.getHits().getAt(0).getId(), notNullValue()); + assertThat(response.getHits().getAt(0).field("_routing"), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + }); assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); 
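// "_none_" disables stored-field loading for the entire hit, so _id above and _routing and
// _source below all come back null; a rough REST sketch of the search being asserted on
// (same "test" index as in this test) would be:
//   GET /test/_search
//   { "stored_fields": "_none_", "_source": false }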
assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); @@ -90,6 +96,40 @@ public void testWithRouting() { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); + + GetResponse getResponse = client().prepareGet("test", "1").setRouting("toto").get(); + assertTrue(getResponse.isExists()); + assertEquals("toto", getResponse.getFields().get("_routing").getValue()); + } + + public void testWithIgnored() { + assertAcked(prepareCreate("test").setMapping("ip", "type=ip,ignore_malformed=true")); + ensureGreen(); + + prepareIndex("test").setId("1").setSource("ip", "value").get(); + refresh(); + + assertResponse(prepareSearch("test"), response -> { + assertThat(response.getHits().getAt(0).getId(), notNullValue()); + assertThat(response.getHits().getAt(0).field("_ignored").getValue(), equalTo("ip")); + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + }); + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).field("_ignored"), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); + + { + GetResponse getResponse = client().prepareGet("test", "1").get(); + assertTrue(getResponse.isExists()); + assertThat(getResponse.getField("_ignored"), nullValue()); + } + { + GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_ignored").get(); + assertTrue(getResponse.isExists()); + assertEquals("ip", getResponse.getField("_ignored").getValue()); + } } public void testInvalid() { diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 83b8606da2997..abfea0b18b9d8 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -6,6 +6,7 @@ * Side Public License, v 1. */ +import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -243,6 +244,7 @@ exports org.elasticsearch.index.codec; exports org.elasticsearch.index.codec.tsdb; exports org.elasticsearch.index.codec.bloomfilter; + exports org.elasticsearch.index.codec.zstd; exports org.elasticsearch.index.engine; exports org.elasticsearch.index.fielddata; exports org.elasticsearch.index.fielddata.fieldcomparator; @@ -433,6 +435,7 @@ with org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; + provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; exports org.elasticsearch.cluster.routing.allocation.shards to diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 57a3afe083707..4a1bf691ea1b0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,7 @@ static TransportVersion def(int id) { public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); + public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 992308bd32018..99e811c021845 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -170,6 +170,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version V_8_13_1 = new Version(8_13_01_99); public static final Version V_8_13_2 = new Version(8_13_02_99); + public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7e03b495438d8..cd01184801c64 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -933,14 +933,12 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestBulkAction(settings)); registerHandler.accept(new RestUpdateAction()); - registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature)); + registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature)); registerHandler.accept(new RestSearchScrollAction()); registerHandler.accept(new RestClearScrollAction()); registerHandler.accept(new RestOpenPointInTimeAction()); registerHandler.accept(new RestClosePointInTimeAction()); - registerHandler.accept( - new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature) - ); + registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), clusterSupportsFeature)); registerHandler.accept(new RestKnnSearchAction()); registerHandler.accept(new RestValidateQueryAction()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java index 39baf25f5dada..92413fc104be4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java @@ -22,7 +22,7 @@ */ public class ClusterAllocationExplainResponse extends ActionResponse implements ChunkedToXContentObject { - private ClusterAllocationExplanation cae; + private final ClusterAllocationExplanation cae; public ClusterAllocationExplainResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java index 89e1ede46d9a8..7a19e7b277a08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.remote; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.transport.RemoteConnectionInfo; import org.elasticsearch.xcontent.ToXContentObject; @@ -23,11 +22,6 @@ public final class RemoteInfoResponse extends ActionResponse implements ToXConte private final List infos; - RemoteInfoResponse(StreamInput in) throws IOException { - super(in); - infos = in.readCollectionAsImmutableList(RemoteConnectionInfo::new); - } - public RemoteInfoResponse(Collection infos) { this.infos = List.copyOf(infos); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java index f4cc4e2f8f5d2..fca9121a3a858 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java @@ -23,9 +23,9 @@ */ public class ClusterStateResponse extends ActionResponse { - private ClusterName clusterName; - private ClusterState clusterState; - private boolean waitForTimedOut = false; + private final ClusterName clusterName; + private final ClusterState clusterState; + private final boolean waitForTimedOut; public ClusterStateResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c421cdefbdbf4..6ffe7ac390260 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -782,6 +782,7 @@ static class IndexPressureStats implements ToXContentFragment { long coordinatingRejections = 0; long primaryRejections = 0; long replicaRejections = 0; + long primaryDocumentRejections = 0; long memoryLimit = 0; long totalCoordinatingOps = 0; @@ -811,6 +812,7 @@ static class IndexPressureStats implements ToXContentFragment { currentCoordinatingOps += nodeStatIndexingPressureStats.getCurrentCoordinatingOps(); currentPrimaryOps += nodeStatIndexingPressureStats.getCurrentPrimaryOps(); currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); + primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); } } indexingPressureStats = new IndexingPressureStats( @@ -831,7 +833,8 @@ static class IndexPressureStats implements ToXContentFragment { totalReplicaOps, currentCoordinatingOps, currentPrimaryOps, - currentReplicaOps + currentReplicaOps, + primaryDocumentRejections ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 24604a3977096..b33a718daee8a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -26,8 +26,8 @@ public class GetStoredScriptResponse extends ActionResponse implements ToXConten public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); public static final ParseField SCRIPT = new ParseField("script"); - private String id; - private StoredScriptSource source; + private final String id; + private final 
StoredScriptSource source; public GetStoredScriptResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java deleted file mode 100644 index 129c07b64fd4d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; - -public class ShrinkAction extends ActionType { - - public static final ShrinkAction INSTANCE = new ShrinkAction(); - public static final String NAME = "indices:admin/shrink"; - - private ShrinkAction() { - super(NAME); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 1e9b1446850af..412e4f3c875e8 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -74,16 +75,16 @@ final class BulkOperation extends ActionRunnable { private final long startTimeNanos; private final ClusterStateObserver observer; private final Map indicesThatCannotBeCreated; - private final String executorName; + private final Executor executor; private final LongSupplier relativeTimeProvider; private final FailureStoreDocumentConverter failureStoreDocumentConverter; - private IndexNameExpressionResolver indexNameExpressionResolver; - private NodeClient client; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final NodeClient client; BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -97,7 +98,7 @@ final class BulkOperation extends ActionRunnable { this( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -115,7 +116,7 @@ final class BulkOperation extends ActionRunnable { BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -137,7 +138,7 @@ final class BulkOperation extends ActionRunnable { this.listener = listener; this.startTimeNanos = startTimeNanos; this.indicesThatCannotBeCreated = indicesThatCannotBeCreated; - this.executorName = executorName; + this.executor = executor; this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; @@ -543,7 +544,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - 
threadPool.executor(executorName).submit(operation); + executor.execute(operation); } }); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index bf50fd06d056b..3494701cf5b7a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -70,6 +70,7 @@ import java.util.Optional; import java.util.Set; import java.util.SortedMap; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -101,6 +102,9 @@ public class TransportBulkAction extends HandledTransportAction releasingListener = ActionListener.runBefore(listener, releasable::close); - final String executorName = isOnlySystem ? Names.SYSTEM_WRITE : Names.WRITE; - ensureClusterStateThenForkAndExecute(task, bulkRequest, executorName, releasingListener); + final Executor executor = isOnlySystem ? systemWriteExecutor : writeExecutor; + ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); } private void ensureClusterStateThenForkAndExecute( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener releasingListener ) { final ClusterState initialState = clusterService.state(); @@ -274,7 +280,7 @@ private void ensureClusterStateThenForkAndExecute( clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } @Override @@ -288,20 +294,20 @@ public void onTimeout(TimeValue timeout) { } }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)); } else { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } } - private void forkAndExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener releasingListener) { - threadPool.executor(executorName).execute(new ActionRunnable<>(releasingListener) { + private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener releasingListener) { + executor.execute(new ActionRunnable<>(releasingListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, releasingListener); + doInternalExecute(task, bulkRequest, executor, releasingListener); } }); } - protected void doInternalExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener listener) { + protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener listener) { final long startTime = relativeTime(); boolean hasIndexRequestsWithPipelines = false; @@ -334,7 +340,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, executorName, metadata, l); + processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } @@ -385,7 +391,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec 
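// (Pattern sketch for this refactor: an Executor resolved once, e.g. via
//      threadPool.executor(Names.WRITE);
//  is passed down in place of the old String executorName, so dispatch sites such as
//  dispatchRetry() above call executor.execute(runnable) directly instead of doing a
//  by-name lookup on every submission.)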
createMissingIndicesAndIndexData( task, bulkRequest, - executorName, + executor, listener, indicesToAutoCreate, dataStreamsToBeRolledOver, @@ -401,7 +407,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, @@ -411,13 +417,13 @@ protected void createMissingIndicesAndIndexData( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); return; } - Runnable executeBulkRunnable = () -> threadPool.executor(executorName).execute(new ActionRunnable<>(listener) { + Runnable executeBulkRunnable = () -> executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { @@ -636,14 +642,14 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { new BulkOperation( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -663,7 +669,7 @@ private long relativeTime() { private void processBulkIndexIngestRequest( Task task, BulkRequest original, - String executorName, + Executor executor, Metadata metadata, ActionListener listener ) { @@ -696,7 +702,7 @@ private void processBulkIndexIngestRequest( ActionRunnable runnable = new ActionRunnable<>(actionListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, actionListener); + doInternalExecute(task, bulkRequest, executor, actionListener); } @Override @@ -713,12 +719,12 @@ public boolean isForceExecution() { if (originalThread == Thread.currentThread()) { runnable.run(); } else { - threadPool.executor(executorName).execute(runnable); + executor.execute(runnable); } } } }, - executorName + executor ); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index f65d0f462fde6..1b3949f3c00ac 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -30,6 +30,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; public class TransportSimulateBulkAction extends TransportBulkAction { @Inject @@ -70,7 +71,7 @@ public TransportSimulateBulkAction( protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToRollover, diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java 
b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java index 9e755cfd7f081..b10471e75fcf8 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.get; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,16 +26,6 @@ public MultiGetItemResponse(GetResponse response, MultiGetResponse.Failure failu this.failure = failure; } - MultiGetItemResponse(StreamInput in) throws IOException { - if (in.readBoolean()) { - failure = new MultiGetResponse.Failure(in); - response = null; - } else { - response = new GetResponse(in); - failure = null; - } - } - /** * The index name of the document. */ diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index 4f548e227dcfb..3306ac874243c 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -109,11 +109,6 @@ public MultiGetResponse(MultiGetItemResponse[] responses) { this.responses = responses; } - MultiGetResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiGetItemResponse::new, MultiGetItemResponse[]::new); - } - public MultiGetItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index 396a5b63b3cd5..e7ad434e3ad7c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -20,9 +20,9 @@ import java.util.List; public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { - private String pipelineId; + private final String pipelineId; private boolean verbose; - private List<SimulateDocumentResult> results; + private final List<SimulateDocumentResult> results; public SimulatePipelineResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 1da114adb34f6..1f8470b3bcd01 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -769,8 +769,14 @@ public final void onFailure(Exception e) { listener.onFailure(e); } - @Override - public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { + /** + * Builds a request for the initial search phase.
+ * + * @param shardIt the target {@link SearchShardIterator} + * @param shardIndex the index of the shard that is used in the coordinator node to + * tiebreak results with identical sort values + */ + protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 3d00d18565756..de0d7b98ef851 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; @@ -34,12 +33,6 @@ public ClearScrollResponse(boolean succeeded, int numFreed) { this.numFreed = numFreed; } - public ClearScrollResponse(StreamInput in) throws IOException { - super(in); - succeeded = in.readBoolean(); - numFreed = in.readVInt(); - } - /** * @return Whether the attempt to clear a scroll was successful. */ diff --git a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java index d8cbfa53ee8ca..09cd96289416f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java @@ -8,11 +8,8 @@ package org.elasticsearch.action.search; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; -import java.io.IOException; - import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; @@ -21,10 +18,6 @@ public ClosePointInTimeResponse(boolean succeeded, int numFreed) { super(succeeded, numFreed); } - public ClosePointInTimeResponse(StreamInput in) throws IOException { - super(in); - } - @Override public RestStatus status() { if (isSucceeded() || getNumFreed() > 0) { diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java index 82cb158a0c59a..fd565ad4878bf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -24,11 +23,6 @@ public OpenPointInTimeResponse(String pointInTimeId) { this.pointInTimeId = Objects.requireNonNull(pointInTimeId, "Point in time parameter must be not null"); } - public OpenPointInTimeResponse(StreamInput in) throws IOException { - super(in); - pointInTimeId = in.readString(); - } - @Override 
public void writeTo(StreamOutput out) throws IOException { out.writeString(pointInTimeId); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index af9bcac8e3a33..c77c7e58efc7d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -15,7 +15,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import java.util.concurrent.Executor; @@ -115,15 +114,6 @@ default void sendReleaseSearchContext( } } - /** - * Builds an request for the initial search phase. - * - * @param shardIt the target {@link SearchShardIterator} - * @param shardIndex the index of the shard that is used in the coordinator node to - * tiebreak results with identical sort values - */ - ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex); - /** * Processes the phase transition from on phase to another. This method handles all errors that happen during the initial run execution * of the next phase. If there are no successful operations in the context when this method is executed the search is aborted and diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 3b03b1cf0a4f6..e46a7bd5f0ec2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -417,8 +417,8 @@ public record FailureStoreOptions(boolean includeRegularIndices, boolean include ToXContentFragment { public static final String FAILURE_STORE = "failure_store"; - public static final String INCLUDE_ALL = "true"; - public static final String INCLUDE_ONLY_REGULAR_INDICES = "false"; + public static final String INCLUDE_ALL = "include"; + public static final String INCLUDE_ONLY_REGULAR_INDICES = "exclude"; public static final String INCLUDE_ONLY_FAILURE_INDICES = "only"; public static final FailureStoreOptions DEFAULT = new FailureStoreOptions(true, false); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d7ff0359bfd27..ac5b004886319 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1183,8 +1183,8 @@ public PendingReplicationActions getPendingReplicationActions() { } public static class ReplicaResponse extends ActionResponse implements ReplicationOperation.ReplicaResponse { - private long localCheckpoint; - private long globalCheckpoint; + private final long localCheckpoint; + private final long globalCheckpoint; ReplicaResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index 3e8290ad4fb4a..0aa565c87b4cd 100644 --- 
a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -35,8 +35,8 @@ public class BaseTasksResponse extends ActionResponse { public static final String TASK_FAILURES = "task_failures"; public static final String NODE_FAILURES = "node_failures"; - private List taskFailures; - private List nodeFailures; + private final List taskFailures; + private final List nodeFailures; public BaseTasksResponse(List taskFailures, List nodeFailures) { this.taskFailures = taskFailures == null ? Collections.emptyList() : List.copyOf(taskFailures); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java index b631d30cfd8bb..5789c4910db09 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java @@ -91,11 +91,6 @@ public MultiTermVectorsResponse(MultiTermVectorsItemResponse[] responses) { this.responses = responses; } - public MultiTermVectorsResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiTermVectorsItemResponse::new, MultiTermVectorsItemResponse[]::new); - } - public MultiTermVectorsItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index d931302740f19..69b897df4d76d 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -247,7 +247,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Opens one or more indices based on their index name. * - * @param indices The name of the indices to close + * @param indices The name of the indices to open */ OpenIndexRequestBuilder prepareOpen(String... 
indices); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 57ab7c431f7ea..364a1b31ceeba 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -119,40 +119,6 @@ public static boolean isFailureStoreEnabled() { @Nullable private final DataStreamAutoShardingEvent autoShardingEvent; - public DataStream( - String name, - List indices, - long generation, - Map metadata, - boolean hidden, - boolean replicated, - boolean system, - boolean allowCustomRouting, - IndexMode indexMode, - DataStreamLifecycle lifecycle, - boolean failureStore, - List failureIndices, - @Nullable DataStreamAutoShardingEvent autoShardingEvent - ) { - this( - name, - indices, - generation, - metadata, - hidden, - replicated, - system, - System::currentTimeMillis, - allowCustomRouting, - indexMode, - lifecycle, - failureStore, - failureIndices, - false, - autoShardingEvent - ); - } - public DataStream( String name, List indices, @@ -222,6 +188,7 @@ public DataStream( this.failureStore = failureStore; this.failureIndices = failureIndices; assert assertConsistent(this.indices); + assert replicated == false || rolloverOnWrite == false : "replicated data streams cannot be marked for lazy rollover"; this.rolloverOnWrite = rolloverOnWrite; this.autoShardingEvent = autoShardingEvent; } @@ -238,7 +205,22 @@ public DataStream( boolean allowCustomRouting, IndexMode indexMode ) { - this(name, indices, generation, metadata, hidden, replicated, system, allowCustomRouting, indexMode, null, false, List.of(), null); + this( + name, + indices, + generation, + metadata, + hidden, + replicated, + system, + allowCustomRouting, + indexMode, + null, + false, + List.of(), + false, + null + ); } private static boolean assertConsistent(List indices) { @@ -507,6 +489,7 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -544,6 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -646,6 +630,7 @@ public DataStream removeBackingIndex(Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -692,6 +677,7 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -753,6 +739,7 @@ public DataStream addBackingIndex(Metadata clusterMetadata, Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -810,6 +797,7 @@ public DataStream snapshot(Collection indicesInSnapshot) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index e8e8ca767cc34..b88292d4ed79b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -1246,32 +1246,36 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices and all the datastreams, considering the open/closed, 
system, and hidden context parameters. + * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ public static Collection resolveAll(Context context) { - List resolvedExpressions = resolveEmptyOrTrivialWildcard(context); - if (context.includeDataStreams() == false) { - return resolvedExpressions; - } else { - Stream dataStreamsAbstractions = context.getState() - .metadata() - .getIndicesLookup() - .values() - .stream() - .filter(indexAbstraction -> indexAbstraction.getType() == Type.DATA_STREAM) - .filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); - if (context.getOptions().expandWildcardsHidden() == false) { - dataStreamsAbstractions = dataStreamsAbstractions.filter(indexAbstraction -> indexAbstraction.isHidden() == false); - } - // dedup backing indices if expand hidden indices option is true - Set resolvedIncludingDataStreams = expandToOpenClosed(context, dataStreamsAbstractions).collect(Collectors.toSet()); - resolvedIncludingDataStreams.addAll(resolvedExpressions); - return resolvedIncludingDataStreams; + List concreteIndices = resolveEmptyOrTrivialWildcard(context); + + if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { + return concreteIndices; } + + Stream ias = context.getState() + .metadata() + .getIndicesLookup() + .values() + .stream() + .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) + .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + resolved.addAll(concreteIndices); + return resolved; + } + + private static boolean shouldIncludeIfDataStream(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.includeDataStreams() && ia.getType() == Type.DATA_STREAM; + } + + private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 6d0b424cad8f2..3c3ff0d130f0a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -339,6 +339,7 @@ static ClusterState createDataStream( lifecycle == null && isDslOnlyMode ? 
DataStreamLifecycle.DEFAULT : lifecycle, template.getDataStreamTemplate().hasFailureStore(), failureIndices, + false, null ); Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(newDataStream); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 4006bc8d1a94a..c03d40984e11c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -213,6 +213,7 @@ static ClusterState updateDataLifecycle( lifecycle, dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ) ); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 7e2c0849a6fad..3ca206eaddb28 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -133,6 +133,7 @@ private static ClusterState applyRequest( final CompressedXContent mappingUpdateSource = request.source(); final Metadata metadata = currentState.metadata(); final List updateList = new ArrayList<>(); + MergeReason reason = request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE; for (Index index : request.indices()) { MapperService mapperService = indexMapperServices.get(index); // IMPORTANT: always get the metadata from the state since it get's batched @@ -147,13 +148,8 @@ private static ClusterState applyRequest( updateList.add(indexMetadata); // try and parse it (no need to add it here) so we can bail early in case of parsing exception // first, simulate: just call merge and ignore the result - Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource); - MapperService.mergeMappings( - mapperService.documentMapper(), - mapping, - request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE, - mapperService.getIndexSettings() - ); + Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, reason, mappingUpdateSource); + MapperService.mergeMappings(mapperService.documentMapper(), mapping, reason, mapperService.getIndexSettings()); } Metadata.Builder builder = Metadata.builder(metadata); boolean updated = false; @@ -169,11 +165,7 @@ private static ClusterState applyRequest( if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = mapperService.merge( - MapperService.SINGLE_MAPPING_NAME, - mappingUpdateSource, - request.autoUpdate() ? 
MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE - ); + DocumentMapper mergedMapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource, reason); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java index bd97ec0c2f63f..056fc59b4fdd5 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java @@ -58,7 +58,7 @@ protected void writeTo(StreamOutput out, Writer nestedExceptionsWrite } @Override - protected String getExceptionName() { + public String getExceptionName() { return name; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 6bdec2380c344..276775a868665 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -114,19 +114,18 @@ public void writeTo(StreamOutput out) throws IOException { /** The oldest metadata format version that can be read. */ private static final int MIN_FORMAT_VERSION = 3; - /** Legacy versions of the metadata written before the keystore data. */ - public static final int V2_VERSION = 2; public static final int V3_VERSION = 3; public static final int V4_VERSION = 4; /** The version where lucene directory API changed from BE to LE. */ public static final int LE_VERSION = 5; - public static final int CURRENT_VERSION = LE_VERSION; + public static final int HIGHER_KDF_ITERATION_COUNT_VERSION = 6; + public static final int CURRENT_VERSION = HIGHER_KDF_ITERATION_COUNT_VERSION; /** The algorithm used to derive the cipher key from a password. */ private static final String KDF_ALGO = "PBKDF2WithHmacSHA512"; /** The number of iterations to derive the cipher key. */ - private static final int KDF_ITERS = 10000; + private static final int KDF_ITERS = 210000; /** * The number of bits for the cipher key. @@ -155,6 +154,7 @@ public void writeTo(StreamOutput out) throws IOException { // 3: FIPS compliant algos, ES 6.3 // 4: remove distinction between string/files, ES 6.8/7.1 // 5: Lucene directory API changed to LE, ES 8.0 + // 6: increase KDF iteration count, ES 8.14 /** The metadata format version used to read the current keystore wrapper. 
*/ private final int formatVersion; @@ -317,8 +317,8 @@ public boolean hasPassword() { return hasPassword; } - private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException { - PBEKeySpec keySpec = new PBEKeySpec(password, salt, KDF_ITERS, CIPHER_KEY_BITS); + private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv, int kdfIters) throws GeneralSecurityException { + PBEKeySpec keySpec = new PBEKeySpec(password, salt, kdfIters, CIPHER_KEY_BITS); SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KDF_ALGO); SecretKey secretKey; try { @@ -337,6 +337,11 @@ private static Cipher createCipher(int opmode, char[] password, byte[] salt, byt return cipher; } + private static int getKdfIterationCountForVersion(int formatVersion) { + // iteration count was increased in version 6; it was 10,000 in previous versions + return formatVersion < HIGHER_KDF_ITERATION_COUNT_VERSION ? 10000 : KDF_ITERS; + } + /** * Decrypts the underlying keystore data. * @@ -365,7 +370,7 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio throw new SecurityException("Keystore has been corrupted or tampered with", e); } - Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv, getKdfIterationCountForVersion(formatVersion)); try ( ByteArrayInputStream bytesStream = new ByteArrayInputStream(encryptedBytes); CipherInputStream cipherStream = new CipherInputStream(bytesStream, cipher); @@ -403,11 +408,11 @@ private static byte[] readByteArray(DataInput input) throws IOException { } /** Encrypt the keystore entries and return the encrypted data. */ - private byte[] encrypt(char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException, IOException { + private byte[] encrypt(char[] password, byte[] salt, byte[] iv, int kdfIterationCount) throws GeneralSecurityException, IOException { assert isLoaded(); ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv, kdfIterationCount); try ( CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher); DataOutputStream output = new DataOutputStream(cipherStream) @@ -450,7 +455,7 @@ public synchronized void save(Path configDir, char[] password, boolean preserveP byte[] iv = new byte[12]; random.nextBytes(iv); // encrypted data - byte[] encryptedBytes = encrypt(password, salt, iv); + byte[] encryptedBytes = encrypt(password, salt, iv, getKdfIterationCountForVersion(CURRENT_VERSION)); // size of data block output.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 5fcb4684d3f8d..14c1d1e9ef6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -33,6 +33,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +/** + * A collection of static methods to help create different ES Executor types. 
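+ * For example, callers generally obtain pools through helpers in this class such as
+ * newFixed(...) or newScaling(...) (argument lists elided here), which apply ES thread
+ * naming and context propagation, rather than constructing a ThreadPoolExecutor directly.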
+ */ public class EsExecutors { // although the available processors may technically change, for node sizing we use the number available at launch diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 4b3bcf7e7278f..32e6c8f5ca849 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -18,6 +18,7 @@ public class HealthFeatures implements FeatureSpecification { public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); + public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @@ -28,6 +29,13 @@ public Set getFeatures() { @Override public Map getHistoricalFeatures() { - return Map.of(SUPPORTS_HEALTH, Version.V_8_5_0, SUPPORTS_SHARDS_CAPACITY_INDICATOR, Version.V_8_8_0); + return Map.of( + SUPPORTS_HEALTH, + Version.V_8_5_0, // health accessible via /_internal/_health + SUPPORTS_HEALTH_REPORT_API, + Version.V_8_7_0, // health accessible via /_health_report + SUPPORTS_SHARDS_CAPACITY_INDICATOR, + Version.V_8_8_0 + ); } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index d0bc8ad980dde..7696cf99b75cd 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -52,6 +52,7 @@ public class IndexingPressure { private final AtomicLong coordinatingRejections = new AtomicLong(0); private final AtomicLong primaryRejections = new AtomicLong(0); private final AtomicLong replicaRejections = new AtomicLong(0); + private final AtomicLong primaryDocumentRejections = new AtomicLong(0); private final long primaryAndCoordinatingLimits; private final long replicaLimits; @@ -136,6 +137,7 @@ public Releasable markPrimaryOperationStarted(int operations, long bytes, boolea long totalBytesWithoutOperation = totalBytes - bytes; this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); this.primaryRejections.getAndIncrement(); + this.primaryDocumentRejections.addAndGet(operations); throw new EsRejectedExecutionException( "rejected execution of primary operation [" + "coordinating_and_primary_bytes=" @@ -218,7 +220,8 @@ public IndexingPressureStats stats() { totalReplicaOps.get(), currentCoordinatingOps.get(), currentPrimaryOps.get(), - currentReplicaOps.get() + currentReplicaOps.get(), + primaryDocumentRejections.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index d4771ba74e0fb..3ebcd1cb5b420 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -11,7 +11,9 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; 
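// (Sketch: FeatureFlag("zstd_stored_fields") keys off a JVM system property, presumably
//  -Des.zstd_stored_fields_feature_flag_enabled=true on builds that allow feature flags;
//  the legacy_default / legacy_best_compression entries registered below stay available as
//  escape hatches back to the LZ4/DEFLATE stored-fields formats, assuming the index.codec
//  index setting accepts the new names.)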
import org.elasticsearch.index.mapper.MapperService; import java.util.HashMap; @@ -25,22 +27,40 @@ */ public class CodecService { + public static final FeatureFlag ZSTD_STORED_FIELDS_FEATURE_FLAG = new FeatureFlag("zstd_stored_fields"); + private final Map codecs; public static final String DEFAULT_CODEC = "default"; + public static final String LEGACY_DEFAULT_CODEC = "legacy_default"; // escape hatch public static final String BEST_COMPRESSION_CODEC = "best_compression"; + public static final String LEGACY_BEST_COMPRESSION_CODEC = "legacy_best_compression"; // escape hatch + /** the raw unfiltered lucene default. useful for testing */ public static final String LUCENE_DEFAULT_CODEC = "lucene_default"; public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene99Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); + + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, legacyBestSpeedCodec); } + codecs.put(LEGACY_DEFAULT_CODEC, legacyBestSpeedCodec); + + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put( + BEST_COMPRESSION_CODEC, + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) + ); + } else { + codecs.put(BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + } + codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { codecs.put(codec, Codec.forName(codec)); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java new file mode 100644 index 0000000000000..e85e05c87b083 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.14. This extends the Lucene 9.9 codec to compress stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch814Codec extends FilterCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch814Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch814Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch814Codec.this.getKnnVectorsFormatForField(field); + } + };
+ + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch814Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch814", new Lucene99Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene99PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + }
+ + /** + * Returns the postings format that should be used for writing new segments of <code>field</code>. + * + * <p>The default implementation always returns "Lucene99". + * + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of <code>field</code>. + * + * <p>The default implementation always returns "Lucene99". + * + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of <code>field</code>. + * + * <p>The default implementation always returns "Lucene99". + * + * <p>WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java new file mode 100644 index 0000000000000..a682d26b094e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.mapper.MapperService; + +/** + * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new + * changes in {@link PerFieldMapperCodec}. + */ +public final class LegacyPerFieldMapperCodec extends Lucene99Codec { + + private final PerFieldFormatSupplier formatSupplier; + + public LegacyPerFieldMapperCodec(Lucene99Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + super(compressionMode); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == getClass().getSuperclass() + : "LegacyPerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; + } + + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return formatSupplier.getPostingsFormatForField(field); + } + + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return formatSupplier.getKnnVectorsFormatForField(field); + } + + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return formatSupplier.getDocValuesFormatForField(field); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java new file mode 100644 index 0000000000000..81fc2c0b4a065 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; +import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Objects; + +/** + * Class that encapsulates the logic of figuring out the most appropriate file format for a given field, across postings, doc values and + * vectors. + */ +public class PerFieldFormatSupplier { + + private final MapperService mapperService; + private final BigArrays bigArrays; + private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); + private final KnnVectorsFormat knnVectorsFormat = new Lucene99HnswVectorsFormat(); + private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; + private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + + private final ES812PostingsFormat es812PostingsFormat; + + public PerFieldFormatSupplier(MapperService mapperService, BigArrays bigArrays) { + this.mapperService = mapperService; + this.bigArrays = Objects.requireNonNull(bigArrays); + this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); + this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); + this.es812PostingsFormat = new ES812PostingsFormat(); + } + + public PostingsFormat getPostingsFormatForField(String field) { + if (useBloomFilter(field)) { + return bloomFilterPostingsFormat; + } + return internalGetPostingsFormatForField(field); + } + + private PostingsFormat internalGetPostingsFormatForField(String field) { + if (mapperService != null) { + final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); + if (format != null) { + return format; + } + } + // return our own posting format using PFOR + return es812PostingsFormat; + } + + boolean useBloomFilter(String field) { + if (mapperService == null) { + return false; + } + IndexSettings indexSettings = mapperService.getIndexSettings(); + if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { + // For time series indices, the _id isn't randomly generated but is derived from the + // dimension fields and the timestamp field, so during indexing + // version/seq_no/term need to be looked up, and having a bloom filter + // can speed this up significantly.
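+ // (Hedged illustration; the exact _id encoding is an implementation detail: in time series mode
+ // the _id is derived deterministically from the routing hash, the _tsid and the @timestamp rather
+ // than auto-generated, so every index operation must first probe for an already-existing _id, and
+ // a bloom filter answers the common "not present" case without touching the terms dictionary.)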
+ return indexSettings.getMode() == IndexMode.TIME_SERIES + && IdFieldMapper.NAME.equals(field) + && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } else { + return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } + } + + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + if (mapperService != null) { + Mapper mapper = mapperService.mappingLookup().getMapper(field); + if (mapper instanceof DenseVectorFieldMapper vectorMapper) { + return vectorMapper.getKnnVectorsFormatForField(knnVectorsFormat); + } + } + return knnVectorsFormat; + } + + public DocValuesFormat getDocValuesFormatForField(String field) { + if (useTSDBDocValuesFormat(field)) { + return tsdbDocValuesFormat; + } + return docValuesFormat; + } + + boolean useTSDBDocValuesFormat(final String field) { + if (excludeFields(field)) { + return false; + } + + return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); + } + + private boolean excludeFields(String fieldName) { + // Avoid using tsdb codec for fields like _seq_no, _primary_term. + // But _tsid and _ts_routing_hash should always use the tsdb codec. + return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; + } + + private boolean isTimeSeriesModeIndex() { + return mapperService != null && IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index ae497af887d9c..6f88578260db3 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -12,19 +12,10 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; -import org.elasticsearch.index.codec.postings.ES812PostingsFormat; -import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; /** * {@link PerFieldMapperCodec This Lucene codec} provides the default @@ -34,93 +25,32 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
*/ -public final class PerFieldMapperCodec extends Lucene99Codec { +public final class PerFieldMapperCodec extends Elasticsearch814Codec { - private final MapperService mapperService; - private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); - private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; - private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + private final PerFieldFormatSupplier formatSupplier; - private final ES812PostingsFormat es812PostingsFormat; - - static { - assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMapperCodec.class) - : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; - } - - public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); - this.mapperService = mapperService; - this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); - this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); - this.es812PostingsFormat = new ES812PostingsFormat(); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == delegate.getClass() + : "PerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; } @Override public PostingsFormat getPostingsFormatForField(String field) { - if (useBloomFilter(field)) { - return bloomFilterPostingsFormat; - } - return internalGetPostingsFormatForField(field); - } - - private PostingsFormat internalGetPostingsFormatForField(String field) { - final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); - if (format != null) { - return format; - } - // return our own posting format using PFOR - return es812PostingsFormat; - } - - boolean useBloomFilter(String field) { - IndexSettings indexSettings = mapperService.getIndexSettings(); - if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { - // In case for time series indices, they _id isn't randomly generated, - // but based on dimension fields and timestamp field, so during indexing - // version/seq_no/term needs to be looked up and having a bloom filter - // can speed this up significantly. 
- return indexSettings.getMode() == IndexMode.TIME_SERIES - && IdFieldMapper.NAME.equals(field) - && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } else { - return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } + return formatSupplier.getPostingsFormatForField(field); } @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - Mapper mapper = mapperService.mappingLookup().getMapper(field); - if (mapper instanceof DenseVectorFieldMapper vectorMapper) { - return vectorMapper.getKnnVectorsFormatForField(super.getKnnVectorsFormatForField(field)); - } - return super.getKnnVectorsFormatForField(field); + return formatSupplier.getKnnVectorsFormatForField(field); } @Override public DocValuesFormat getDocValuesFormatForField(String field) { - if (useTSDBDocValuesFormat(field)) { - return tsdbDocValuesFormat; - } - return docValuesFormat; - } - - boolean useTSDBDocValuesFormat(final String field) { - if (excludeFields(field)) { - return false; - } - - return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); - } - - private boolean excludeFields(String fieldName) { - // Avoid using tsdb codec for fields like _seq_no, _primary_term. - // But _tsid and _ts_routing_hash should always use the tsdb codec. - return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; - } - - private boolean isTimeSeriesModeIndex() { - return IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + return formatSupplier.getDocValuesFormatForField(field); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java new file mode 100644 index 0000000000000..b827bb6436f07 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.codecs.compressing.CompressionMode; +import org.apache.lucene.codecs.compressing.Compressor; +import org.apache.lucene.codecs.compressing.Decompressor; +import org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingStoredFieldsFormat; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.ByteBuffersDataInput; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; +import org.elasticsearch.nativeaccess.NativeAccess; +import org.elasticsearch.nativeaccess.Zstd; + +import java.io.IOException; + +/** + * {@link org.apache.lucene.codecs.StoredFieldsFormat} that compresses blocks of data using ZStandard. 
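+ * + * (Worked numbers for the two modes defined below: BEST_SPEED compresses blocks of (16 - 2) * 1_024 = 14_336 bytes and
+ * BEST_COMPRESSION blocks of (256 - 16) * 1_024 = 245_760 bytes, deliberately sized to stay under the 16kB and 256kB input sizes
+ * that ZSTD optimizes for.)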
+ * + * Unlike Lucene's default stored fields format, this format does not make use of dictionaries (even though ZStandard has great support for + * dictionaries!). This is mostly due to the fact that LZ4/DEFLATE have short sliding windows that they can use to find duplicate strings + * (64kB and 32kB respectively). In contrast, ZSTD doesn't have such a limitation and can better take advantage of large compression + * buffers. + */ +public final class Zstd814StoredFieldsFormat extends Lucene90CompressingStoredFieldsFormat { + + // ZSTD has special optimizations for inputs that are less than 16kB and less than 256kB. So subtract a bit of memory from 16kB and + // 256kB to make our inputs unlikely to grow beyond 16kB for BEST_SPEED and 256kB for BEST_COMPRESSION. + private static final int BEST_SPEED_BLOCK_SIZE = (16 - 2) * 1_024; + private static final int BEST_COMPRESSION_BLOCK_SIZE = (256 - 16) * 1_024; + + /** Attribute key for compression mode. */ + public static final String MODE_KEY = Zstd814StoredFieldsFormat.class.getSimpleName() + ".mode"; + + public enum Mode { + BEST_SPEED(0, BEST_SPEED_BLOCK_SIZE, 128), + BEST_COMPRESSION(3, BEST_COMPRESSION_BLOCK_SIZE, 2048); + + final int level, blockSizeInBytes, blockDocCount; + + Mode(int level, int blockSizeInBytes, int blockDocCount) { + this.level = level; + this.blockSizeInBytes = blockSizeInBytes; + this.blockDocCount = blockDocCount; + } + } + + private final Mode mode; + + public Zstd814StoredFieldsFormat(Mode mode) { + super("ZstdStoredFields814", new ZstdCompressionMode(mode.level), mode.blockSizeInBytes, mode.blockDocCount, 10); + this.mode = mode; + } + + @Override + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + // Both modes are compatible; we only put an attribute for debug purposes. + String previous = si.putAttribute(MODE_KEY, mode.name()); + if (previous != null && previous.equals(mode.name()) == false) { + throw new IllegalStateException( + "found existing value for " + MODE_KEY + " for segment: " + si.name + ", old=" + previous + ", new=" + mode.name() + ); + } + return super.fieldsWriter(directory, si, context); + } + + private static class ZstdCompressionMode extends CompressionMode { + private final int level; + + ZstdCompressionMode(int level) { + this.level = level; + } + + @Override + public Compressor newCompressor() { + return new ZstdCompressor(level); + } + + @Override + public Decompressor newDecompressor() { + return new ZstdDecompressor(); + } + + @Override + public String toString() { + return "ZSTD(level=" + level + ")"; + } + } + + private static final class ZstdDecompressor extends Decompressor { + + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control.
+ final byte[] copyBuffer = new byte[4096]; + + ZstdDecompressor() {} + + @Override + public void decompress(DataInput in, int originalLength, int offset, int length, BytesRef bytes) throws IOException { + if (originalLength == 0) { + bytes.offset = 0; + bytes.length = 0; + return; + } + + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int compressedLength = in.readVInt(); + + try ( + CloseableByteBuffer src = nativeAccess.newBuffer(compressedLength); + CloseableByteBuffer dest = nativeAccess.newBuffer(originalLength) + ) { + + while (src.buffer().position() < compressedLength) { + final int numBytes = Math.min(copyBuffer.length, compressedLength - src.buffer().position()); + in.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int decompressedLen = zstd.decompress(dest, src); + if (decompressedLen != originalLength) { + throw new CorruptIndexException("Expected " + originalLength + " decompressed bytes, got " + decompressedLen, in); + } + + bytes.bytes = ArrayUtil.growNoCopy(bytes.bytes, length); + dest.buffer().get(offset, bytes.bytes, 0, length); + bytes.offset = 0; + bytes.length = length; + } + } + + @Override + public Decompressor clone() { + return new ZstdDecompressor(); + } + } + + private static class ZstdCompressor extends Compressor { + + final int level; + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. + final byte[] copyBuffer = new byte[4096]; + + ZstdCompressor(int level) { + this.level = level; + } + + @Override + public void compress(ByteBuffersDataInput buffersInput, DataOutput out) throws IOException { + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int srcLen = Math.toIntExact(buffersInput.length()); + if (srcLen == 0) { + return; + } + + final int compressBound = zstd.compressBound(srcLen); + + // NOTE: We are allocating/deallocating native buffers on each call. We could save allocations by reusing these buffers, though + // this would come at the expense of higher permanent memory usage. Benchmarks suggested that there is some performance to save + // there, but it wouldn't be a game changer either. + // Also note that calls to #compress implicitly allocate memory under the hood for e.g. hash tables and chain tables that help + // identify duplicate strings. So if we wanted to avoid allocating memory on every compress call, we should also look into + // reusing compression contexts, which are not small and would increase permanent memory usage as well. 
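+ // (For context, and hedged on the exact formula: compressBound(srcLen) asks the native library for
+ // the worst-case compressed size of srcLen input bytes, roughly srcLen plus a small fraction of
+ // srcLen plus a constant, so the destination buffer allocated below can never be too small.)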
+ try ( + CloseableByteBuffer src = nativeAccess.newBuffer(srcLen); + CloseableByteBuffer dest = nativeAccess.newBuffer(compressBound) + ) { + + while (buffersInput.position() < buffersInput.length()) { + final int numBytes = Math.min(copyBuffer.length, (int) (buffersInput.length() - buffersInput.position())); + buffersInput.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int compressedLen = zstd.compress(dest, src, level); + out.writeVInt(compressedLen); + + for (int written = 0; written < compressedLen;) { + final int numBytes = Math.min(copyBuffer.length, compressedLen - written); + dest.buffer().get(copyBuffer, 0, numBytes); + out.writeBytes(copyBuffer, 0, numBytes); + written += numBytes; + assert written == dest.buffer().position(); + } + } + } + + @Override + public void close() throws IOException {} + } +} diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java index f3dcda813a39d..bc83f85edcf7d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class BooleanScriptLeafFieldData extends LeafLongFieldData { private final BooleanScriptDocValues booleanScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java index 1199583f89766..a9fdf72e23a31 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DateScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java index 9307233f99161..e08a62eee8fb0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java @@ -89,6 +89,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DoubleScriptLeafFieldData extends LeafDoubleFieldData { private final DoubleScriptDocValues doubleScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java index dce94649e0088..391e9e285807f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java @@ -121,6 +121,7 @@ public final 
SortField sortField( case LONG: case DOUBLE: // longs, doubles and dates use the same type for doc-values and points. + sortField.setOptimizeSortWithPoints(isIndexed()); break; default: @@ -132,12 +133,18 @@ public final SortField sortField( } /** - * Does {@link #sortField} require a custom comparator because of the way - * the data is stored in doc values ({@code true}) or are the docs values - * stored such that they can be sorted without decoding ({@code false}). + * Should sorting use a custom comparator source vs. rely on a Lucene {@link SortField}. Using a Lucene {@link SortField} when possible + * is important because index sorting cannot be configured with a custom comparator, and because it gives better performance by + * dynamically pruning irrelevant hits. On the other hand, Lucene {@link SortField}s are less flexible and make stronger assumptions + * about how the data is indexed. Therefore, they cannot be used in all cases. */ protected abstract boolean sortRequiresCustomComparator(); + /** + * Return true if, and only if the field is indexed with points that match the content of doc values. + */ + protected abstract boolean isIndexed(); + @Override public final SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { return sortField(getNumericType(), missingValue, sortMode, nested, reverse); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java index 6be5eb9514918..85850b530a1de 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java @@ -92,6 +92,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class LongScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java index a1686344b9309..b7654dfa5569f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java @@ -42,22 +42,25 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -65,18 +68,21 @@ public 
SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedDoublesIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint(); this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -94,6 +100,11 @@ protected boolean sortRequiresCustomComparator() { return numericType == NumericType.HALF_FLOAT; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return numericType; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java index c2507dd2470a5..9c871ac822625 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -42,26 +42,34 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; - public Builder(String name, NumericType numericType, ToScriptFieldFactory toScriptFieldFactory) { - this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory); + public Builder( + String name, + NumericType numericType, + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed + ) { + this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory, indexed); } public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -69,18 +77,21 @@ public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedNumericIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint() == false; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + 
this.indexed = indexed; } @Override @@ -98,6 +109,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override protected XFieldComparatorSource dateComparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { if (numericType == NumericType.DATE_NANOSECONDS) { diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index d7f6e3541838b..92f74615711f1 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -8,26 +8,27 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; import java.util.HashSet; -import java.util.List; import java.util.Set; /** - * A field visitor that allows to load a selection of the stored fields by exact name - * {@code _id} and {@code _routing} fields are always loaded. + * A field visitor that allows loading a selection of the stored fields by exact name. + * {@code _id}, {@code _routing}, and {@code _ignored} fields are always loaded. + * {@code _source} is always loaded unless disabled explicitly. */ public class CustomFieldsVisitor extends FieldsVisitor { - private final Set fields; public CustomFieldsVisitor(Set fields, boolean loadSource) { super(loadSource); this.fields = new HashSet<>(fields); - // metadata fields are already handled by FieldsVisitor, so removing - // them here means that if the only fields requested are metadata - // fields then we can shortcut loading - List.of("_id", "_routing", "_source").forEach(this.fields::remove); + // metadata fields that are always retrieved are already handled by FieldsVisitor, so removing + // them here means that if the only fields requested are those metadata fields then we can shortcut loading + FieldsVisitor.BASE_REQUIRED_FIELDS.forEach(this.fields::remove); + this.fields.remove(this.sourceFieldName); + this.fields.remove(IgnoredFieldMapper.NAME); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 4789dcc131b89..bf4ad150b1ee4 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -34,10 +34,10 @@ * Base {@link StoredFieldVisitor} that retrieves all non-redundant metadata. */ public class FieldsVisitor extends FieldNamesProvidingStoredFieldsVisitor { - private static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); + static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); private final boolean loadSource; - private final String sourceFieldName; + final String sourceFieldName; private final Set requiredFields; protected BytesReference source; protected String id; @@ -63,6 +63,7 @@ public Status needsField(FieldInfo fieldInfo) { // Always load _ignored to be explicit about ignored fields // This works because _ignored is added as the first metadata mapper, // so its stored fields always appear first in the list.
+ // Note that _ignored is also multi-valued, which is why it can't be removed from the set like other fields if (IgnoredFieldMapper.NAME.equals(fieldInfo.name)) { return Status.YES; } @@ -72,8 +73,7 @@ public Status needsField(FieldInfo fieldInfo) { return Status.YES; } } - // All these fields are single-valued so we can stop when the set is - // empty + // All these fields are single-valued so we can stop when the set is empty return requiredFields.isEmpty() ? Status.STOP : Status.NO; } @@ -100,7 +100,7 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) { binaryField(fieldInfo, new BytesRef(value)); } - public void binaryField(FieldInfo fieldInfo, BytesRef value) { + private void binaryField(FieldInfo fieldInfo, BytesRef value) { if (sourceFieldName.equals(fieldInfo.name)) { source = new BytesArray(value); } else if (IdFieldMapper.NAME.equals(fieldInfo.name)) { @@ -147,12 +147,6 @@ public void doubleField(FieldInfo fieldInfo, double value) { addValue(fieldInfo.name, value); } - public void objectField(FieldInfo fieldInfo, Object object) { - assert IdFieldMapper.NAME.equals(fieldInfo.name) == false : "_id field must go through binaryField"; - assert sourceFieldName.equals(fieldInfo.name) == false : "source field must go through binaryField"; - addValue(fieldInfo.name, object); - } - public BytesReference source() { return source; } @@ -178,7 +172,9 @@ public Map> fields() { } public void reset() { - if (fieldsValues != null) fieldsValues.clear(); + if (fieldsValues != null) { + fieldsValues.clear(); + } source = null; id = null; @@ -193,11 +189,7 @@ void addValue(String name, Object value) { fieldsValues = new HashMap<>(); } - List values = fieldsValues.get(name); - if (values == null) { - values = new ArrayList<>(2); - fieldsValues.put(name, values); - } + List values = fieldsValues.computeIfAbsent(name, k -> new ArrayList<>(2)); values.add(value); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 968c48abc54d8..f07cd1cc32076 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -276,7 +276,9 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext } if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new); + // boolean fields are indexed, but not with points + boolean indexed = false; + return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new, indexed); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1b926734c1713..3092ed1e827df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -795,7 +795,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext return new SortedNumericIndexFieldData.Builder( name(), resolution.numericType(), - resolution.getDefaultToScriptFieldFactory() + resolution.getDefaultToScriptFieldFactory(), + isIndexed() ); } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 92aa8662eaf9d..a42477bed2146 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.FlatteningXContentParser; import org.elasticsearch.xcontent.XContentParser; @@ -618,7 +619,14 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() { if (objectMapper instanceof PassThroughObjectMapper passThroughObjectMapper) { containsDimensions = passThroughObjectMapper.containsDimensions(); } - return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic(), false, containsDimensions, dynamic); + return new MapperBuilderContext( + p, + mappingLookup().isSourceSynthetic(), + false, + containsDimensions, + dynamic, + MergeReason.MAPPING_UPDATE + ); } public abstract XContentParser parser(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index bbfb9298c23ca..15caa7f5a6238 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import java.util.Objects; @@ -22,7 +23,11 @@ public class MapperBuilderContext { * The root context, to be used when building a tree of mappers */ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream) { - return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC); + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE); + } + + public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason) { + return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason); } private final String path; @@ -30,9 +35,10 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat private final boolean isDataStream; private final boolean parentObjectContainsDimensions; private final ObjectMapper.Dynamic dynamic; + private final MergeReason mergeReason; MapperBuilderContext(String path) { - this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC); + this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC, MergeReason.MAPPING_UPDATE); } MapperBuilderContext( @@ -40,7 +46,8 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat boolean isSourceSynthetic, boolean isDataStream, boolean parentObjectContainsDimensions, - ObjectMapper.Dynamic dynamic + ObjectMapper.Dynamic dynamic, + MergeReason mergeReason ) { Objects.requireNonNull(dynamic, "dynamic must not be null"); this.path = path; @@ -48,6 +55,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat this.isDataStream = 
isDataStream; this.parentObjectContainsDimensions = parentObjectContainsDimensions; this.dynamic = dynamic; + this.mergeReason = mergeReason; } /** @@ -79,7 +87,8 @@ public MapperBuilderContext createChildContext( this.isSourceSynthetic, this.isDataStream, parentObjectContainsDimensions, - getDynamic(dynamic) + getDynamic(dynamic), + this.mergeReason ); } @@ -121,4 +130,12 @@ public boolean parentObjectContainsDimensions() { public ObjectMapper.Dynamic getDynamic() { return dynamic; } + + /** + * The merge reason to use when merging mappers while building the mapper. + * See also {@link ObjectMapper.Builder#buildMappers(MapperBuilderContext)}. + */ + public MergeReason getMergeReason() { + return mergeReason; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java index 8f8854ad47c7d..1e3f69baf86dd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.index.mapper.MapperService.MergeReason; + /** * Holds context used when merging mappings. * As the merge process also involves building merged {@link Mapper.Builder}s, @@ -23,11 +25,18 @@ private MapperMergeContext(MapperBuilderContext mapperBuilderContext, NewFieldsB this.newFieldsBudget = newFieldsBudget; } + static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE, newFieldsBudget); + } + /** * The root context, to be used when merging a tree of mappers */ - public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { - return new MapperMergeContext(MapperBuilderContext.root(isSourceSynthetic, isDataStream), NewFieldsBudget.of(newFieldsBudget)); + public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason, long newFieldsBudget) { + return new MapperMergeContext( + MapperBuilderContext.root(isSourceSynthetic, isDataStream, mergeReason), + NewFieldsBudget.of(newFieldsBudget) + ); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 4646936b8891f..f91c4f176c6da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -310,7 +310,7 @@ public void updateMapping(final IndexMetadata currentIndexMetadata, final IndexM if (newMappingMetadata != null) { String type = newMappingMetadata.type(); CompressedXContent incomingMappingSource = newMappingMetadata.source(); - Mapping incomingMapping = parseMapping(type, incomingMappingSource); + Mapping incomingMapping = parseMapping(type, MergeReason.MAPPING_UPDATE, incomingMappingSource); DocumentMapper previousMapper; synchronized (this) { previousMapper = this.mapper; @@ -366,7 +366,7 @@ boolean assertNoUpdateRequired(final IndexMetadata newIndexMetadata) { // that the incoming mappings are the same as the current ones: we need to // parse the incoming mappings into a DocumentMapper and check that its // serialization is the same as the existing mapper - Mapping newMapping = parseMapping(mapping.type(), mapping.source()); + Mapping newMapping = 
parseMapping(mapping.type(), MergeReason.MAPPING_UPDATE, mapping.source()); final CompressedXContent currentSource = this.mapper.mappingSource(); final CompressedXContent newSource = newMapping.toCompressedXContent(); if (Objects.equals(currentSource, newSource) == false @@ -533,7 +533,7 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge } private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map mappingSourceAsMap) { - Mapping incomingMapping = parseMapping(type, mappingSourceAsMap); + Mapping incomingMapping = parseMapping(type, reason, mappingSourceAsMap); Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason, this.indexSettings); // TODO: In many cases the source here is equal to mappingSource so we need not serialize again. // We should identify these cases reliably and save expensive serialization here @@ -542,7 +542,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map return newMapper; } this.mapper = newMapper; - assert assertSerialization(newMapper); + assert assertSerialization(newMapper, reason); return newMapper; } @@ -552,9 +552,9 @@ private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, Co return newMapper; } - public Mapping parseMapping(String mappingType, CompressedXContent mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, CompressedXContent mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -564,12 +564,13 @@ public Mapping parseMapping(String mappingType, CompressedXContent mappingSource * A method to parse mapping from a source in a map form. * * @param mappingType the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping */ - public Mapping parseMapping(String mappingType, Map mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, Map mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -619,10 +620,10 @@ static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMappi return newMapping; } - private boolean assertSerialization(DocumentMapper mapper) { + private boolean assertSerialization(DocumentMapper mapper, MergeReason reason) { // capture the source now, it may change due to concurrent parsing final CompressedXContent mappingSource = mapper.mappingSource(); - Mapping newMapping = parseMapping(mapper.type(), mappingSource); + Mapping newMapping = parseMapping(mapper.type(), reason, mappingSource); if (newMapping.toCompressedXContent().equals(mappingSource) == false) { throw new AssertionError( "Mapping serialization result is different from source. 
\n--> Source [" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index 903e4e5da5b29..b5de3971fa091 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -137,8 +137,8 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { * @return the resulting merged mapping. */ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, newFieldsBudget); - RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, mergeContext); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, reason, newFieldsBudget); + RootObjectMapper mergedRoot = root.merge(mergeWith.root, mergeContext); // When merging metadata fields as part of applying an index template, new field definitions // completely overwrite existing ones instead of being merged. This behavior matches how we @@ -176,11 +176,11 @@ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { * @param fieldsBudget the maximum number of fields this mapping may have */ public Mapping withFieldsBudget(long fieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, fieldsBudget); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, MergeReason.MAPPING_RECOVERY, fieldsBudget); // get a copy of the root mapper, without any fields RootObjectMapper shallowRoot = root.withoutMappers(); // calling merge on the shallow root to ensure we're only adding as many fields as allowed by the fields budget - return new Mapping(shallowRoot.merge(root, MergeReason.MAPPING_RECOVERY, mergeContext), metadataMappers, meta); + return new Mapping(shallowRoot.merge(root, mergeContext), metadataMappers, meta); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 8b30915ca4d3c..86d8c1686858c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentType; import java.util.Collections; @@ -79,20 +80,25 @@ static Map convertToMap(CompressedXContent source) { } Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { + return parse(type, MergeReason.MAPPING_UPDATE, source); + } + + Mapping parse(@Nullable String type, MergeReason reason, CompressedXContent source) throws MapperParsingException { Map mapping = convertToMap(source); - return parse(type, mapping); + return parse(type, reason, mapping); } /** * A method to parse mapping from a source in a map form. 
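+ * + * (A hedged illustration of why the reason now threads through parsing: as noted in the {@code Mapping#merge} comment, with
+ * {@code INDEX_TEMPLATE} incoming field definitions are allowed to overwrite existing ones outright, whereas a regular
+ * {@code MAPPING_UPDATE} must merge with and stay compatible with existing definitions, so the same mapping source can
+ * legitimately build different mappers depending on the reason.)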
* * @param type the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping * @throws MapperParsingException in case of parsing error */ @SuppressWarnings("unchecked") - Mapping parse(@Nullable String type, Map mappingSource) throws MapperParsingException { + Mapping parse(@Nullable String type, MergeReason reason, Map mappingSource) throws MapperParsingException { if (mappingSource.isEmpty()) { if (type == null) { throw new MapperParsingException("malformed mapping, no type name found"); @@ -178,7 +184,7 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M } return new Mapping( - rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream)), + rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream, reason)), metadataMappers.values().toArray(new MetadataFieldMapper[0]), meta ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index f07d69d86f36c..5c2880a4bf760 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -65,7 +65,8 @@ public NestedObjectMapper build(MapperBuilderContext context) { NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( context.buildFullName(name()), parentIncludedInRoot, - context.getDynamic(dynamic) + context.getDynamic(dynamic), + context.getMergeReason() ); final String fullPath = context.buildFullName(name()); final String nestedTypePath; @@ -121,14 +122,14 @@ private static class NestedMapperBuilderContext extends MapperBuilderContext { final boolean parentIncludedInRoot; - NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic) { - super(path, false, false, false, dynamic); + NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic, MapperService.MergeReason mergeReason) { + super(path, false, false, false, dynamic, mergeReason); this.parentIncludedInRoot = parentIncludedInRoot; } @Override public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { - return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic)); + return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic), getMergeReason()); } } @@ -226,16 +227,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if ((mergeWith instanceof NestedObjectMapper) == false) { MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; - return merge(mergeWithObject, reason, parentMergeContext); - } - ObjectMapper merge(NestedObjectMapper mergeWithObject, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + final MapperService.MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); + var mergeResult = 
MergeResult.build(this, mergeWithObject, parentMergeContext); Explicit incInParent = this.includeInParent; Explicit incInRoot = this.includeInRoot; if (reason == MapperService.MergeReason.INDEX_TEMPLATE) { @@ -287,7 +286,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo new NestedMapperBuilderContext( mapperBuilderContext.buildFullName(name), parentIncludedInRoot, - mapperBuilderContext.getDynamic(dynamic) + mapperBuilderContext.getDynamic(dynamic), + mapperBuilderContext.getMergeReason() ) ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1f7a3bf2106ae..ebb6672cbab18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -403,8 +403,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, HalfFloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + HalfFloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -577,8 +583,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, FloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + FloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -717,8 +729,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, DoubleDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + DoubleDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -831,8 +849,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ByteDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ByteDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -930,8 +954,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, 
ShortDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ShortDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1097,8 +1127,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, IntegerDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + IntegerDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1234,8 +1270,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, LongDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + LongDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1494,7 +1536,7 @@ public static Query longRangeQuery( return builder.apply(l, u); } - public abstract IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType); + public abstract IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType); public IndexFieldData.Builder getValueFetcherFieldDataBuilder( String name, @@ -1693,7 +1735,7 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext : type.numericType.getValuesSourceType(); if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return type.getFieldDataBuilder(name(), valuesSourceType); + return type.getFieldDataBuilder(this, valuesSourceType); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 33e736ff122a1..ba396e9a72d30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -454,11 +454,6 @@ public final boolean subobjects() { return subobjects.value(); } - @Override - public ObjectMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { - return merge(mergeWith, MergeReason.MAPPING_UPDATE, mapperMergeContext); - } - @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -470,7 +465,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo return mapperMergeContext.createChildContext(name, dynamic); } - public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + @Override + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof ObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } @@ -478,11 +474,7 @@ public 
ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeConte // TODO stop NestedObjectMapper extending ObjectMapper? MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } - return merge((ObjectMapper) mergeWith, reason, parentMergeContext); - } - - ObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWith, reason, parentMergeContext); + var mergeResult = MergeResult.build(this, (ObjectMapper) mergeWith, parentMergeContext); return new ObjectMapper( simpleName(), fullPath, @@ -499,13 +491,9 @@ protected record MergeResult( ObjectMapper.Dynamic dynamic, Map mappers ) { - static MergeResult build( - ObjectMapper existing, - ObjectMapper mergeWithObject, - MergeReason reason, - MapperMergeContext parentMergeContext - ) { + static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, MapperMergeContext parentMergeContext) { final Explicit enabled; + final MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); if (mergeWithObject.enabled.explicit()) { if (reason == MergeReason.INDEX_TEMPLATE) { enabled = mergeWithObject.enabled; @@ -532,13 +520,7 @@ static MergeResult build( subObjects = existing.subobjects; } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); - Map mergedMappers = buildMergedMappers( - existing, - mergeWithObject, - reason, - objectMergeContext, - subObjects.value() - ); + Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, @@ -550,7 +532,6 @@ static MergeResult build( private static Map buildMergedMappers( ObjectMapper existing, ObjectMapper mergeWithObject, - MergeReason reason, MapperMergeContext objectMergeContext, boolean subobjects ) { @@ -576,11 +557,11 @@ private static Map buildMergedMappers( } else if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.getTotalFieldsCount())) { putMergedMapper(mergedMappers, mergeWithMapper); } else if (mergeWithMapper instanceof ObjectMapper om) { - putMergedMapper(mergedMappers, truncateObjectMapper(reason, objectMergeContext, om)); + putMergedMapper(mergedMappers, truncateObjectMapper(objectMergeContext, om)); } } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { assert subobjects : "existing object mappers are supposed to be flattened if subobjects is false"; - putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, reason, objectMergeContext)); + putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, objectMergeContext)); } else { assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; if (mergeWithMapper instanceof NestedObjectMapper) { @@ -591,7 +572,7 @@ private static Map buildMergedMappers( // If we're merging template mappings when creating an index, then a field definition always // replaces an existing one. 
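The hunks above move the MergeReason off the merge(...) signatures and onto the builder context, which every child context inherits. A standalone sketch of that shape, with invented names (MergeContext, MergeSketch), not the Elasticsearch API:

    // The merge "reason" rides on the context, so nested merge code can branch
    // on it without every method in the chain declaring an extra parameter.
    enum MergeReason { MAPPING_UPDATE, INDEX_TEMPLATE, MAPPING_RECOVERY }

    record MergeContext(MergeReason reason) {
        MergeContext createChildContext() {
            return new MergeContext(reason); // children inherit the reason unchanged
        }
    }

    final class MergeSketch {
        static String merge(String existing, String incoming, MergeContext ctx) {
            // Template merges replace a definition wholesale; updates combine them.
            return ctx.reason() == MergeReason.INDEX_TEMPLATE ? incoming : existing + "," + incoming;
        }

        public static void main(String[] args) {
            MergeContext ctx = new MergeContext(MergeReason.INDEX_TEMPLATE).createChildContext();
            System.out.println(merge("old-definition", "new-definition", ctx)); // prints: new-definition
        }
    }

This mirrors how getMergeReason() is read back from the MapperBuilderContext at the few places that branch on INDEX_TEMPLATE, instead of threading the enum through every call.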
- if (reason == MergeReason.INDEX_TEMPLATE) { + if (objectMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { putMergedMapper(mergedMappers, mergeWithMapper); } else { putMergedMapper(mergedMappers, mergeIntoMapper.merge(mergeWithMapper, objectMergeContext)); } @@ -607,13 +588,13 @@ private static void putMergedMapper(Map mergedMappers, @Nullable } } - private static ObjectMapper truncateObjectMapper(MergeReason reason, MapperMergeContext context, ObjectMapper objectMapper) { + private static ObjectMapper truncateObjectMapper(MapperMergeContext context, ObjectMapper objectMapper) { // there's not enough capacity for the whole object mapper, // so we're just trying to add the shallow object, without its sub-fields ObjectMapper shallowObjectMapper = objectMapper.withoutMappers(); if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.getTotalFieldsCount())) { // now trying to add the sub-fields one by one via a merge, until we hit the limit - return shallowObjectMapper.merge(objectMapper, reason, context); + return shallowObjectMapper.merge(objectMapper, context); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 16b4d0b49917f..d44f03d72e211 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -100,9 +99,14 @@ public PassThroughObjectMapper.Builder newBuilder(IndexVersion indexVersionCreat return builder; } - public PassThroughObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentBuilderContext) { - final var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + @Override + public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) { + if (mergeWith instanceof PassThroughObjectMapper == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); + } + PassThroughObjectMapper mergeWithObject = (PassThroughObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentBuilderContext); final Explicit containsDimensions = (mergeWithObject.timeSeriesDimensionSubFields.explicit()) ?
mergeWithObject.timeSeriesDimensionSubFields diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 90d9c879c57e1..8db3a970e31c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -345,15 +345,13 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo } @Override - public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof RootObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } - return merge((RootObjectMapper) mergeWith, reason, parentMergeContext); - } - RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, MapperMergeContext parentMergeContext) { - final var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentMergeContext); final Explicit numericDetection; if (mergeWithObject.numericDetection.explicit()) { numericDetection = mergeWithObject.numericDetection; @@ -377,7 +375,7 @@ RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, Map final Explicit dynamicTemplates; if (mergeWithObject.dynamicTemplates.explicit()) { - if (reason == MergeReason.INDEX_TEMPLATE) { + if (parentMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { Map templatesByKey = new LinkedHashMap<>(); for (DynamicTemplate template : this.dynamicTemplates.value()) { templatesByKey.put(template.name(), template); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 2635c1c11be8e..a46a310d0770f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -216,7 +216,7 @@ public Query rangeQuery( @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 15770785e11f9..4a6eaa5b26c39 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; @@ -28,6 +29,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections; import java.util.List; @@ -39,6 +41,8 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final String CONTENT_TYPE = "_source"; + public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters"; + /** The source mode */ private enum Mode { DISABLED, @@ -128,9 +132,12 @@ public static class Builder extends MetadataFieldMapper.Builder { private final IndexMode indexMode; - public Builder(IndexMode indexMode) { + private final boolean supportsNonDefaultParameterValues; + + public Builder(IndexMode indexMode, final Settings settings) { super(Defaults.NAME); this.indexMode = indexMode; + this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -145,13 +152,10 @@ protected Parameter[] getParameters() { private boolean isDefault() { Mode m = mode.get(); - if (m != null && (indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) { + if (m != null && (((indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) { return false; } - if (enabled.get().value() == false) { - return false; - } - return includes.getValue().isEmpty() && excludes.getValue().isEmpty(); + return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); } @Override @@ -167,6 +171,27 @@ public SourceFieldMapper build() { if (isDefault()) { return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : DEFAULT; } + if (supportsNonDefaultParameterValues == false) { + List disallowed = new ArrayList<>(); + if (enabled.get().value() == false) { + disallowed.add("enabled"); + } + if (includes.get().isEmpty() == false) { + disallowed.add("includes"); + } + if (excludes.get().isEmpty() == false) { + disallowed.add("excludes"); + } + if (mode.get() == Mode.DISABLED) { + disallowed.add("mode=disabled"); + } + assert disallowed.isEmpty() == false; + throw new MapperParsingException( + disallowed.size() == 1 + ? "Parameter [" + disallowed.get(0) + "] is not allowed in source" + : "Parameters [" + String.join(",", disallowed) + "] are not allowed in source" + ); + } SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( mode.get(), enabled.get(), @@ -186,7 +211,7 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? 
TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode()) + c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) ); static final class SourceFieldType extends MappedFieldType { @@ -321,7 +346,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode).init(this); + return new Builder(indexMode, Settings.EMPTY).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 0d4f5562d3046..1d4f56b02ed74 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -66,7 +66,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java index 5bdeac75989a8..87173aceaa059 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java @@ -30,10 +30,10 @@ * Response used for actions that index many documents using a scroll request. */ public class BulkByScrollResponse extends ActionResponse implements ToXContentFragment { - private TimeValue took; - private BulkByScrollTask.Status status; - private List bulkFailures; - private List searchFailures; + private final TimeValue took; + private final BulkByScrollTask.Status status; + private final List bulkFailures; + private final List searchFailures; private boolean timedOut; static final String TOOK_FIELD = "took"; diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index 81eb40e6f6f61..608fa3128bf09 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -32,6 +32,7 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long coordinatingRejections; private final long primaryRejections; private final long replicaRejections; + private final long primaryDocumentRejections; private final long memoryLimit; // These fields will be used for additional back-pressure and metrics in the future @@ -70,6 +71,12 @@ public IndexingPressureStats(StreamInput in) throws IOException { this.currentCoordinatingOps = 0; this.currentPrimaryOps = 0; this.currentReplicaOps = 0; + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + primaryDocumentRejections = in.readVLong(); + } else { + primaryDocumentRejections = -1L; + } } public IndexingPressureStats( @@ -90,7 +97,8 @@ public IndexingPressureStats( long totalReplicaOps, long currentCoordinatingOps, long currentPrimaryOps, - long currentReplicaOps + long currentReplicaOps, + long primaryDocumentRejections ) { 
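The SourceFieldMapper change above gates lossy _source options behind the index.lossy.source-mapping-parameters setting and reports every offending parameter in a single error. A minimal standalone sketch of that validation pattern (LossySourceParamsCheck is an invented name; the real check also covers mode=disabled):

    import java.util.ArrayList;
    import java.util.List;

    final class LossySourceParamsCheck {
        // Collect every disallowed parameter first, then fail once with a
        // message naming all of them, matching the one-vs-many wording above.
        static void validate(boolean lossyAllowed, boolean enabled, List<String> includes, List<String> excludes) {
            if (lossyAllowed) {
                return;
            }
            List<String> disallowed = new ArrayList<>();
            if (enabled == false) disallowed.add("enabled");
            if (includes.isEmpty() == false) disallowed.add("includes");
            if (excludes.isEmpty() == false) disallowed.add("excludes");
            if (disallowed.isEmpty() == false) {
                throw new IllegalArgumentException(
                    disallowed.size() == 1
                        ? "Parameter [" + disallowed.get(0) + "] is not allowed in source"
                        : "Parameters [" + String.join(",", disallowed) + "] are not allowed in source"
                );
            }
        }

        public static void main(String[] args) {
            try {
                validate(false, false, List.of("a.*"), List.of());
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // Parameters [enabled,includes] are not allowed in source
            }
        }
    }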
this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -111,6 +119,8 @@ public IndexingPressureStats( this.currentCoordinatingOps = currentCoordinatingOps; this.currentPrimaryOps = currentPrimaryOps; this.currentReplicaOps = currentReplicaOps; + + this.primaryDocumentRejections = primaryDocumentRejections; } @Override @@ -132,6 +142,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { out.writeVLong(memoryLimit); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + out.writeVLong(primaryDocumentRejections); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -206,6 +220,10 @@ public long getMemoryLimit() { return memoryLimit; } + public long getPrimaryDocumentRejections() { + return primaryDocumentRejections; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; @@ -219,6 +237,7 @@ public long getMemoryLimit() { private static final String COORDINATING_REJECTIONS = "coordinating_rejections"; private static final String PRIMARY_REJECTIONS = "primary_rejections"; private static final String REPLICA_REJECTIONS = "replica_rejections"; + private static final String PRIMARY_DOCUMENT_REJECTIONS = "primary_document_rejections"; private static final String LIMIT = "limit"; private static final String LIMIT_IN_BYTES = "limit_in_bytes"; @@ -246,6 +265,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(COORDINATING_REJECTIONS, coordinatingRejections); builder.field(PRIMARY_REJECTIONS, primaryRejections); builder.field(REPLICA_REJECTIONS, replicaRejections); + builder.field(PRIMARY_DOCUMENT_REJECTIONS, primaryDocumentRejections); builder.endObject(); builder.humanReadableField(LIMIT_IN_BYTES, LIMIT, ByteSizeValue.ofBytes(memoryLimit)); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java index ce6f1b21b734c..d5973807d9d78 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java @@ -13,41 +13,49 @@ import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class InferenceServiceRegistry implements Closeable { + + private final Map services; + private final List namedWriteables = new ArrayList<>(); + + public InferenceServiceRegistry( + List inferenceServicePlugins, + InferenceServiceExtension.InferenceServiceFactoryContext factoryContext + ) { + // TODO check names are unique + services = inferenceServicePlugins.stream() + .flatMap(r -> r.getInferenceServiceFactories().stream()) + .map(factory -> factory.create(factoryContext)) + .collect(Collectors.toMap(InferenceService::name, Function.identity())); + } -public interface InferenceServiceRegistry extends Closeable { - void init(Client client); - - Map getServices(); - - Optional getService(String serviceName); - - List getNamedWriteables(); - - 
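The IndexingPressureStats hunks above follow the usual wire-compatibility pattern: the new primaryDocumentRejections counter is only serialized when the peer's TransportVersion is new enough, and reads from older peers fall back to a -1 sentinel. A simplified standalone analogue using plain java.io streams in place of StreamInput/StreamOutput (invented names; the real code uses readVLong/writeVLong):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class VersionGatedFieldSketch {
        static final int FIELD_ADDED_IN = 2; // stand-in for the new TransportVersion constant

        static void write(DataOutputStream out, int peerVersion, long primaryDocumentRejections) throws IOException {
            if (peerVersion >= FIELD_ADDED_IN) {
                out.writeLong(primaryDocumentRejections); // only newer peers get the field
            }
        }

        static long read(DataInputStream in, int peerVersion) throws IOException {
            // Older peers never sent the field, so report "unknown" as -1.
            return peerVersion >= FIELD_ADDED_IN ? in.readLong() : -1L;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            write(new DataOutputStream(buf), 1, 42L); // old peer: nothing written
            long value = read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())), 1);
            System.out.println(value); // -1
        }
    }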
class NoopInferenceServiceRegistry implements InferenceServiceRegistry { - public NoopInferenceServiceRegistry() {} + public void init(Client client) { + services.values().forEach(s -> s.init(client)); + } - @Override - public void init(Client client) {} + public Map getServices() { + return services; + } - @Override - public Map getServices() { - return Map.of(); - } + public Optional getService(String serviceName) { + return Optional.ofNullable(services.get(serviceName)); + } - @Override - public Optional getService(String serviceName) { - return Optional.empty(); - } + public List getNamedWriteables() { + return namedWriteables; + } - @Override - public List getNamedWriteables() { - return List.of(); + @Override + public void close() throws IOException { + for (var service : services.values()) { + service.close(); } - - @Override - public void close() throws IOException {} } } diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java deleted file mode 100644 index f0a990ded98ce..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class InferenceServiceRegistryImpl implements InferenceServiceRegistry { - - private final Map services; - private final List namedWriteables = new ArrayList<>(); - - public InferenceServiceRegistryImpl( - List inferenceServicePlugins, - InferenceServiceExtension.InferenceServiceFactoryContext factoryContext - ) { - // TODO check names are unique - services = inferenceServicePlugins.stream() - .flatMap(r -> r.getInferenceServiceFactories().stream()) - .map(factory -> factory.create(factoryContext)) - .collect(Collectors.toMap(InferenceService::name, Function.identity())); - } - - @Override - public void init(Client client) { - services.values().forEach(s -> s.init(client)); - } - - @Override - public Map getServices() { - return services; - } - - @Override - public Optional getService(String serviceName) { - return Optional.ofNullable(services.get(serviceName)); - } - - @Override - public List getNamedWriteables() { - return namedWriteables; - } - - @Override - public void close() throws IOException { - for (var service : services.values()) { - service.close(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java b/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java deleted file mode 100644 index fa90d5ba6f756..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.action.ActionListener; - -import java.util.List; -import java.util.Map; - -public interface ModelRegistry { - - /** - * Get a model. - * Secret settings are not included - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModel(String inferenceEntityId, ActionListener listener); - - /** - * Get a model with its secret settings - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModelWithSecrets(String inferenceEntityId, ActionListener listener); - - /** - * Get all models of a particular task type. - * Secret settings are not included - * @param taskType The task type - * @param listener Models listener - */ - void getModelsByTaskType(TaskType taskType, ActionListener> listener); - - /** - * Get all models. - * Secret settings are not included - * @param listener Models listener - */ - void getAllModels(ActionListener> listener); - - void storeModel(Model model, ActionListener listener); - - void deleteModel(String modelId, ActionListener listener); - - /** - * Semi parsed model where inference entity id, task type and service - * are known but the settings are not parsed. - */ - record UnparsedModel( - String inferenceEntityId, - TaskType taskType, - String service, - Map settings, - Map secrets - ) {} - - class NoopModelRegistry implements ModelRegistry { - @Override - public void getModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void getAllModels(ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void storeModel(Model model, ActionListener listener) { - fail(listener); - } - - @Override - public void deleteModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { - fail(listener); - } - - private static void fail(ActionListener listener) { - listener.onFailure(new IllegalArgumentException("No model registry configured")); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index f406684c50948..be1906ab8d05e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -83,6 +83,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -696,7 +697,7 @@ private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exce * @param onCompletion A callback executed once all documents have been processed. Accepts the thread * that ingestion completed on or an exception in the event that the entire operation * has failed. - * @param executorName Which executor the bulk request should be executed on. + * @param executor Which executor the bulk request should be executed on. 
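The executeBulkRequest change above swaps a String thread-pool name for a java.util.concurrent.Executor, so the lookup happens once at the caller and tests can pass a direct executor. A small sketch of the idea (ExecutorParamSketch is invented; in the real code the caller resolves something like threadPool.executor(...)):

    import java.util.concurrent.Executor;

    final class ExecutorParamSketch {
        // Accepting an Executor keeps this method decoupled from thread-pool
        // naming and lets callers substitute a same-thread executor in tests.
        static void executeBulk(Runnable work, Executor executor) {
            executor.execute(work);
        }

        public static void main(String[] args) {
            executeBulk(() -> System.out.println("bulk ran"), Runnable::run); // runs on the calling thread
        }
    }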
*/ public void executeBulkRequest( final int numberOfActionRequests, @@ -706,11 +707,11 @@ public void executeBulkRequest( final TriConsumer onStoreFailure, final BiConsumer onFailure, final BiConsumer onCompletion, - final String executorName + final Executor executor ) { assert numberOfActionRequests > 0 : "numberOfActionRequests must be greater than 0 but was [" + numberOfActionRequests + "]"; - threadPool.executor(executorName).execute(new AbstractRunnable() { + executor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 527acb8d4fcbc..e689898b05da6 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -621,7 +621,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerDoubleGauge( - "es.indexing.primary_operations.rejections.ratio", + "es.indexing.primary_operations.document.rejections.ratio", "Ratio of rejected primary operations", "ratio", () -> { @@ -629,13 +629,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { .map(NodeStats::getIndexingPressureStats) .map(IndexingPressureStats::getTotalPrimaryOps) .orElse(0L); - var totalPrimaryRejections = Optional.ofNullable(stats.getOrRefresh()) + var totalPrimaryDocumentRejections = Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getPrimaryRejections) + .map(IndexingPressureStats::getPrimaryDocumentRejections) .orElse(0L); - // rejections do not count towards `totalPrimaryOperations` - var totalOps = totalPrimaryOperations + totalPrimaryRejections; - return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryRejections / totalOps : 0.0); + // primary document rejections do not count towards `totalPrimaryOperations` + var totalOps = totalPrimaryOperations + totalPrimaryDocumentRejections; + return new DoubleWithAttributes(totalOps != 0 ? 
(double) totalPrimaryDocumentRejections / totalOps : 0.0); } ) ); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 15ebe2752451d..5bf19c4b87157 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -127,8 +127,6 @@ import org.elasticsearch.indices.recovery.plan.PeerOnlyRecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; @@ -147,7 +145,6 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.HealthPlugin; -import org.elasticsearch.plugins.InferenceRegistryPlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MetadataUpgrader; @@ -1114,18 +1111,6 @@ record PluginServiceInstances( ); } - // Register noop versions of inference services if Inference plugin is not available - Optional inferenceRegistryPlugin = getSinglePlugin(InferenceRegistryPlugin.class); - modules.bindToInstance( - InferenceServiceRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getInferenceServiceRegistry) - .orElse(new InferenceServiceRegistry.NoopInferenceServiceRegistry()) - ); - modules.bindToInstance( - ModelRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getModelRegistry).orElse(new ModelRegistry.NoopModelRegistry()) - ); - injector = modules.createInjector(); postInjection(clusterModule, actionModule, clusterService, transportService, featureService); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java index 3560f3f28076d..fca6a9b2dde7d 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java @@ -19,7 +19,7 @@ * Response upon a successful start or an update of a persistent task */ public class PersistentTaskResponse extends ActionResponse { - private PersistentTask task; + private final PersistentTask task; public PersistentTaskResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java deleted file mode 100644 index 696c3a067dad1..0000000000000 --- a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
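For the renamed gauge in the NodeMetrics hunk above, the arithmetic is worth spelling out: rejected documents never increment totalPrimaryOps, so the denominator must add the rejections back to get total attempts. A tiny worked example (RejectionRatioSketch is an invented name):

    final class RejectionRatioSketch {
        static double ratio(long totalPrimaryOperations, long primaryDocumentRejections) {
            // successes + rejections = attempts; guard against dividing by zero
            long totalOps = totalPrimaryOperations + primaryDocumentRejections;
            return totalOps != 0 ? (double) primaryDocumentRejections / totalOps : 0.0;
        }

        public static void main(String[] args) {
            System.out.println(ratio(95, 5)); // 0.05, i.e. 5 rejections out of 100 attempts
        }
    }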
- */ - -package org.elasticsearch.plugins; - -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; - -/** - * Plugins that provide inference services should implement this interface. - * There should be a single one in the classpath, as we currently support a single instance for ModelRegistry / InfereceServiceRegistry. - */ -public interface InferenceRegistryPlugin { - InferenceServiceRegistry getInferenceServiceRegistry(); - - ModelRegistry getModelRegistry(); -} diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 41e849b4d2ebd..5a33a958646df 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -680,7 +680,7 @@ protected BlobStore getBlobStore() { * maintains single lazy instance of {@link BlobContainer} */ protected BlobContainer blobContainer() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (lifecycle.started() == false) { throw notStartedException(); @@ -705,7 +705,7 @@ protected BlobContainer blobContainer() { * Public for testing. */ public BlobStore blobStore() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); BlobStore store = blobStore.get(); if (store == null) { @@ -1994,7 +1994,7 @@ public long getRestoreThrottleTimeInNanos() { return restoreRateLimitingTimeInNanos.count(); } - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // The Stateless plugin adds custom thread pools for object store operations assert ThreadPool.assertCurrentThreadPool( ThreadPool.Names.SNAPSHOT, @@ -3539,7 +3539,7 @@ public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotI @Override public void verify(String seed, DiscoveryNode localNode) { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (isReadOnly()) { try { latestIndexBlobId(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index a0796c0f95639..98895a49fae6e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -54,7 +54,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); if (DataStream.isFailureStoreEnabled()) { - boolean failureStore = request.paramAsBoolean("failure_store", false); + boolean failureStore = request.paramAsBoolean("target_failure_store", false); if (failureStore) { rolloverIndexRequest.setIndicesOptions( IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 69cc4f23f3956..0a7a4a9701a90 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; @@ -51,18 +50,11 @@ public class RestMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestMultiSearchAction( - Settings settings, - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -85,13 +77,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final MultiSearchRequest multiSearchRequest = parseRequest( - request, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature - ); + final MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature); return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); cancellableClient.execute( @@ -107,19 +93,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature ) throws IOException { - return parseRequest( - restRequest, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature, - (k, v, r) -> false - ); + return parseRequest(restRequest, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, (k, v, r) -> false); } /** @@ -128,7 +106,6 @@ public static MultiSearchRequest parseRequest( */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature, diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index cfb70da9fb454..3dbb98f7a7685 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; @@ -71,16 +70,10 @@ public class RestSearchAction extends BaseRestHandler { public static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -124,15 +117,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); return channel -> { @@ -148,7 +133,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter - * @param namedWriteableRegistry the registry of named writeables * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code update_by_query} and regular search differ here. */ @@ -156,11 +140,10 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize ) throws IOException { - parseSearchRequest(searchRequest, request, requestContentParser, namedWriteableRegistry, clusterSupportsFeature, setSize, null); + parseSearchRequest(searchRequest, request, requestContentParser, clusterSupportsFeature, setSize, null); } /** @@ -170,8 +153,7 @@ public static void parseSearchRequest( * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter, will be null when there is no request body to parse - * @param namedWriteableRegistry the registry of named writeables - @param clusterSupportsFeature used to check if certain features are available in this cluster + * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code update_by_query} and regular search differ here.
* @param searchUsageHolder the holder of search usage stats */ @@ -179,7 +161,6 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, @Nullable XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize, @Nullable SearchUsageHolder searchUsageHolder diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index c423b2ca8cb51..11bd63bcdaa8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.DelayedBucket; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -112,23 +113,6 @@ private B reduceBucket(List buckets, AggregationReduceContext context) { return createBucket(docCount, aggs, docCountError, buckets.get(0)); } - private BucketOrder getReduceOrder(List aggregations) { - BucketOrder thisReduceOrder = null; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().size() == 0) { - continue; - } - if (thisReduceOrder == null) { - thisReduceOrder = terms.getReduceOrder(); - } else if (thisReduceOrder.equals(terms.getReduceOrder()) == false) { - return getOrder(); - } - } - return thisReduceOrder != null ? thisReduceOrder : getOrder(); - } - private long getDocCountError(A terms) { int size = terms.getBuckets().size(); if (size == 0 || size < terms.getShardSize() || isKeyOrder(terms.getOrder())) { @@ -154,47 +138,37 @@ private long getDocCountError(A terms) { * @return the order we used to reduce the buckets */ private BucketOrder reduceBuckets( - List aggregations, + List> bucketsList, + BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { - /* - * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. - * That allows to perform a merge sort when reducing multiple aggregations together. - * For backward compatibility, we disable the merge sort and use ({@link #reduceLegacy} if any of - * the provided aggregations use a different {@link #reduceOrder}. - */ - BucketOrder thisReduceOrder = getReduceOrder(aggregations); if (isKeyOrder(thisReduceOrder)) { // extract the primary sort in case this is a compound order. 
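reduceMergeSort above exploits the fact that each shard's buckets arrive pre-sorted by key, so the reduce is a k-way merge: one iterator per bucket list sits in a priority queue ordered by its current element. A runnable sketch of the same technique with java.util.PriorityQueue (the real code uses Lucene's PriorityQueue and IteratorAndCurrent):

    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;
    import java.util.PriorityQueue;

    final class KWayMergeSketch {
        record Cursor(Iterator<Integer> it, Integer current) {}

        public static void main(String[] args) {
            List<List<Integer>> sortedLists = List.of(List.of(1, 4, 7), List.of(2, 5), List.of(3, 6));
            // Queue is ordered by each iterator's current head element.
            PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparing(Cursor::current));
            for (List<Integer> list : sortedLists) {
                Iterator<Integer> it = list.iterator();
                if (it.hasNext()) {
                    pq.add(new Cursor(it, it.next()));
                }
            }
            while (pq.isEmpty() == false) {
                Cursor top = pq.poll();
                System.out.print(top.current() + " "); // emitted in global key order: 1 2 3 4 5 6 7
                if (top.it().hasNext()) {
                    pq.add(new Cursor(top.it(), top.it().next()));
                }
            }
        }
    }

Equal keys from different shards come out adjacent, which is what lets the caller collect same-key buckets into one reduced bucket before moving on.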
thisReduceOrder = InternalOrder.key(isKeyAsc(thisReduceOrder)); - reduceMergeSort(aggregations, thisReduceOrder, reduceContext, sink); + reduceMergeSort(bucketsList, thisReduceOrder, reduceContext, sink); } else { - reduceLegacy(aggregations, reduceContext, sink); + reduceLegacy(bucketsList, reduceContext, sink); } return thisReduceOrder; } private void reduceMergeSort( - List aggregations, + List> bucketsList, BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { assert isKeyOrder(thisReduceOrder); final Comparator cmp = thisReduceOrder.comparator(); - final PriorityQueue> pq = new PriorityQueue<>(aggregations.size()) { + final PriorityQueue> pq = new PriorityQueue<>(bucketsList.size()) { @Override protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { return cmp.compare(a.current(), b.current()) < 0; } }; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - pq.add(new IteratorAndCurrent<>(terms.getBuckets().iterator())); - } + for (List buckets : bucketsList) { + pq.add(new IteratorAndCurrent<>(buckets.iterator())); } // list of buckets coming from different shards that have the same key List sameTermBuckets = new ArrayList<>(); @@ -228,19 +202,11 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } } - private void reduceLegacy( - List aggregations, - AggregationReduceContext reduceContext, - Consumer> sink - ) { - Map> bucketMap = new HashMap<>(); - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - for (B bucket : terms.getBuckets()) { - bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); - } + private void reduceLegacy(List> bucketsList, AggregationReduceContext reduceContext, Consumer> sink) { + final Map> bucketMap = new HashMap<>(); + for (List buckets : bucketsList) { + for (B bucket : buckets) { + bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); } } for (List sameTermBuckets : bucketMap.values()) { @@ -248,21 +214,49 @@ private void reduceLegacy( } } - public InternalAggregation doReduce(List aggregations, AggregationReduceContext reduceContext) { - long sumDocCountError = 0; - long[] otherDocCount = new long[] { 0 }; - A referenceTerms = null; - for (InternalAggregation aggregation : aggregations) { + public final AggregatorReducer termsAggregationReducer(AggregationReduceContext reduceContext, int size) { + return new TermsAggregationReducer(reduceContext, size); + } + + private class TermsAggregationReducer implements AggregatorReducer { + private final List> bucketsList; + private final AggregationReduceContext reduceContext; + + private long sumDocCountError = 0; + private final long[] otherDocCount = new long[] { 0 }; + private A referenceTerms = null; + /* + * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. + * That allows performing a merge sort when reducing multiple aggregations together. + * For backward compatibility, we disable the merge sort and use {@link #reduceLegacy} if any of + * the provided aggregations use a different {@link #reduceOrder}.
+ */ + private BucketOrder thisReduceOrder = null; + + private TermsAggregationReducer(AggregationReduceContext reduceContext, int size) { + bucketsList = new ArrayList<>(size); + this.reduceContext = reduceContext; + } + + @Override + public void accept(InternalAggregation aggregation) { + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") A terms = (A) aggregation; - if (referenceTerms == null && terms.canLeadReduction()) { + if (referenceTerms == null) { referenceTerms = terms; - } - if (referenceTerms != null && referenceTerms.getClass().equals(terms.getClass()) == false && terms.canLeadReduction()) { + } else if (referenceTerms.getClass().equals(terms.getClass()) == false) { // control gets into this loop when the same field name against which the query is executed // is of different types in different indices. throw AggregationErrors.reduceTypeMismatch(referenceTerms.getName(), Optional.empty()); } + if (thisReduceOrder == null) { + thisReduceOrder = terms.getReduceOrder(); + } else if (thisReduceOrder != getOrder() && thisReduceOrder.equals(terms.getReduceOrder()) == false) { + thisReduceOrder = getOrder(); + } otherDocCount[0] += terms.getSumOfOtherDocCounts(); final long thisAggDocCountError = getDocCountError(terms); if (sumDocCountError != -1) { @@ -283,52 +277,63 @@ public InternalAggregation doReduce(List<InternalAggregation> aggregations, Aggr // later in this method. bucket.updateDocCountError(-thisAggDocCountError); } + if (terms.getBuckets().isEmpty() == false) { + bucketsList.add(terms.getBuckets()); + } } - BucketOrder thisReduceOrder; - List<B> result; - if (reduceContext.isFinalReduce()) { - TopBucketBuilder<B> top = TopBucketBuilder.build( - getRequiredSize(), - getOrder(), - removed -> otherDocCount[0] += removed.getDocCount() - ); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (bucket.getDocCount() >= getMinDocCount()) { - top.add(bucket); - } - }); - result = top.build(); - } else { - /* - * We can prune the list on partial reduce if the aggregation is ordered - * by key and not filtered on doc count. The results come in key order - * so we can just stop iteration early. - */ - boolean canPrune = isKeyOrder(getOrder()) && getMinDocCount() == 0; - result = new ArrayList<>(); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (canPrune == false || result.size() < getRequiredSize()) { - result.add(bucket.reduced()); + @Override + public InternalAggregation get() { + BucketOrder thisReduceOrder; + List<B> result; + if (isKeyOrder(getOrder()) && getMinDocCount() <= 1) { + /* + * the aggregation is ordered by key and not filtered on doc count. The results come in key order + * so we can just use an optimized collection.
+ */ + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (result.size() < getRequiredSize()) { + result.add(bucket.reduced()); + } else { + otherDocCount[0] += bucket.getDocCount(); + } + }); + } else if (reduceContext.isFinalReduce()) { + TopBucketBuilder<B> top = TopBucketBuilder.build( + getRequiredSize(), + getOrder(), + removed -> otherDocCount[0] += removed.getDocCount() + ); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (bucket.getDocCount() >= getMinDocCount()) { + top.add(bucket); + } + }); + result = top.build(); + } else { + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> result.add(bucket.reduced())); + } + for (B r : result) { + if (sumDocCountError == -1) { + r.setDocCountError(-1); } else { - otherDocCount[0] += bucket.getDocCount(); + r.updateDocCountError(sumDocCountError); } - }); - } - for (B r : result) { + } + long docCountError; if (sumDocCountError == -1) { - r.setDocCountError(-1); + docCountError = -1; } else { - r.updateDocCountError(sumDocCountError); + docCountError = bucketsList.size() == 1 ? 0 : sumDocCountError; } + return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } - long docCountError; - if (sumDocCountError == -1) { - docCountError = -1; - } else { - docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + + private BucketOrder getThisReduceOrder() { + return thisReduceOrder == null ? getOrder() : thisReduceOrder; } - return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java index 6710dd51a3dd7..2e40ab35b21c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -18,7 +19,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -190,21 +190,25 @@ protected DoubleTerms create(String name, List<Bucket> buckets, BucketOrder redu @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final List<InternalAggregation> aggregations = new ArrayList<>(); + private final AggregatorReducer processor = termsAggregationReducer(reduceContext, size); @Override public void accept(InternalAggregation aggregation) { if (aggregation instanceof LongTerms longTerms) { - DoubleTerms dTerms = LongTerms.convertLongTermsToDouble(longTerms, format); - aggregations.add(dTerms); + processor.accept(LongTerms.convertLongTermsToDouble(longTerms, format)); } else { - aggregations.add(aggregation); + processor.accept(aggregation); } } @Override public
InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + return processor.get(); + } + + @Override + public void close() { + Releasables.close(processor); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index b0af2c3d4e618..76f33b1c0e726 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -213,8 +213,8 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont } return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); - boolean isPromotedToDouble = false; + private List aggregations = new ArrayList<>(size); + private boolean isPromotedToDouble = false; @Override public void accept(InternalAggregation aggregation) { @@ -243,7 +243,16 @@ private void promoteToDouble(List aggregations) { @Override public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index 44bbf62c7cb19..aa3788f241079 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -14,12 +14,10 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -153,19 +151,7 @@ public StringTerms(StreamInput in) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { - return new AggregatorReducer() { - private final List aggregations = new ArrayList<>(size); - - @Override - public void accept(InternalAggregation aggregation) { - aggregations.add(aggregation); - } - - @Override - public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); - } - }; + return termsAggregationReducer(reduceContext, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java index d6950df962433..483285dba1fa7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import 
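All three terms classes now share the same incremental-reducer shape: partial results are streamed in via `accept`, the combined result is produced once by `get`, and `close` releases any buffers (hence the `Releasables.close(processor)` above and the try-with-resources in `LongTerms`). A rough sketch of that lifecycle under hypothetical names (the real interface is `org.elasticsearch.search.aggregations.AggregatorReducer`):

```java
import java.util.ArrayList;
import java.util.List;

/** A reducer that consumes partial results one at a time and emits a combined result. */
interface IncrementalReducer<T> extends AutoCloseable {
    void accept(T partial); // stream in one shard-level result

    T get(); // combine everything seen so far

    @Override
    default void close() {} // release buffers; safe to call after get()
}

class ReducerExample {
    /** Usage mirroring LongTerms.get() above: try-with-resources guarantees buffers are freed. */
    static long sum(List<Long> partials) {
        try (IncrementalReducer<Long> reducer = new IncrementalReducer<>() {
            private final List<Long> seen = new ArrayList<>();

            @Override
            public void accept(Long partial) {
                seen.add(partial);
            }

            @Override
            public Long get() {
                return seen.stream().mapToLong(Long::longValue).sum();
            }
        }) {
            partials.forEach(reducer::accept);
            return reducer.get();
        }
    }
}
```

The `DoubleTerms` variant above is the same pattern plus a coercion step: `LongTerms` inputs are converted to doubles before being handed to the shared reducer.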
org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -73,16 +74,19 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { if (storedFieldsContext.fieldNames() != null) { SearchExecutionContext sec = fetchContext.getSearchExecutionContext(); for (String field : storedFieldsContext.fieldNames()) { - if (SourceFieldMapper.NAME.equals(field) == false) { - Collection fieldNames = sec.getMatchingFieldNames(field); - for (String fieldName : fieldNames) { - MappedFieldType ft = sec.getFieldType(fieldName); - if (ft.isStored() == false) { - continue; - } - storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name()))); - fieldsToLoad.add(ft.name()); + Collection fieldNames = sec.getMatchingFieldNames(field); + for (String fieldName : fieldNames) { + // _id and _source are always retrieved anyway, no need to do it explicitly. See FieldsVisitor. + // They are not returned as part of HitContext#loadedFields hence they are not added to documents by this sub-phase + if (IdFieldMapper.NAME.equals(field) || SourceFieldMapper.NAME.equals(field)) { + continue; + } + MappedFieldType ft = sec.getFieldType(fieldName); + if (ft.isStored() == false) { + continue; } + storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name()))); + fieldsToLoad.add(ft.name()); } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 4b6e3f30fe6fa..a597901d4600e 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -717,6 +717,7 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad dataStream.getLifecycle(), dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 9668228ac0ec3..544b085a7006d 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -24,6 +24,9 @@ /** * A builder for fixed executors. + * + * Builds an Executor with a static number of threads, as opposed to {@link ScalingExecutorBuilder} that dynamically scales the number of + * threads in the pool up and down based on request load. */ public final class FixedExecutorBuilder extends ExecutorBuilder { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java index 07504bc5f9d2e..29a7d5df08b7b 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java @@ -24,6 +24,10 @@ /** * A builder for scaling executors. 
+ * + * The {@link #build} method will instantiate a Java {@link ExecutorService} thread pool that starts with the specified minimum number of + * threads and then scales up to the specified max number of threads as needed for excess work, scaling back when the burst of activity + * stops, as opposed to {@link FixedExecutorBuilder}, which keeps a fixed number of threads alive. */ public final class ScalingExecutorBuilder extends ExecutorBuilder<ScalingExecutorBuilder.ScalingExecutorSettings> { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index cf554fe81d4a3..ceda140827527 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.node.Node; import org.elasticsearch.node.ReportingService; import org.elasticsearch.telemetry.metric.Instrument; @@ -59,13 +60,28 @@ import static java.util.Map.entry; import static org.elasticsearch.core.Strings.format; +/** + * Manages all the Java thread pools we create. {@link Names} contains a list of the thread pools, but plugins can dynamically add more + * thread pools to instantiate. + */ public class ThreadPool implements ReportingService<ThreadPoolInfo>, Scheduler { private static final Logger logger = LogManager.getLogger(ThreadPool.class); + /** + * List of names that identify Java thread pools that are created in {@link ThreadPool#ThreadPool}. + */ public static class Names { - public static final String SAME = "same"; + /** + * All the tasks that do not relate to the purpose of one of the other thread pools should use this thread pool. Try to pick one of + * the other more specific thread pools where possible. + */ public static final String GENERIC = "generic"; + /** + * Important management tasks that keep the cluster from falling apart. + * This thread pool ensures cluster coordination tasks do not get blocked by less critical tasks and can continue to make progress. + * This thread pool also defaults to a single thread, reducing contention on the Coordinator mutex. + */ public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String GET = "get"; public static final String ANALYZE = "analyze"; @@ -75,6 +91,10 @@ public static class Names { public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; + /** + * Cluster management tasks. Tasks that manage data, and tasks that report on cluster health via statistics etc. + * Not a latency-sensitive thread pool: some tasks may at times be long-running, and the thread pool size is limited / relatively small.
+ */ public static final String MANAGEMENT = "management"; public static final String FLUSH = "flush"; public static final String REFRESH = "refresh"; @@ -99,9 +119,13 @@ public static class Names { public static final String THREAD_POOL_METRIC_NAME_REJECTED = ".threads.rejected.total"; public enum ThreadPoolType { + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 DIRECT("direct"), FIXED("fixed"), - FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), // TODO: remove in 9.0 + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 + FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), SCALING("scaling"); private final String type; @@ -127,7 +151,6 @@ public static ThreadPoolType fromType(String type) { } public static final Map<String, ThreadPoolType> THREAD_POOL_TYPES = Map.ofEntries( - entry(Names.SAME, ThreadPoolType.DIRECT), entry(Names.GENERIC, ThreadPoolType.SCALING), entry(Names.GET, ThreadPoolType.FIXED), entry(Names.ANALYZE, ThreadPoolType.FIXED), @@ -151,6 +174,8 @@ public static ThreadPoolType fromType(String type) { entry(Names.SYSTEM_CRITICAL_WRITE, ThreadPoolType.FIXED) ); + public static final double searchAutoscalingEWMA = 0.1; + private final Map<String, ExecutorHolder> executors; private final ThreadPoolInfo threadPoolInfo; @@ -194,6 +219,13 @@ public Collection<ExecutorBuilder> builders() { Setting.Property.NodeScope ); + /** + * Defines and builds the many thread pools delineated in {@link Names}. + * + * @param settings the node settings, used to size and configure the pools + * @param meterRegistry the telemetry registry used to publish thread pool metrics + * @param customBuilders a list of additional thread pool builders that were defined elsewhere (like a Plugin). + */ @SuppressWarnings({ "rawtypes", "unchecked" }) public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final ExecutorBuilder<?>... customBuilders) { assert Node.NODE_NAME_SETTING.exists(settings); @@ -222,7 +254,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16, TaskTrackingConfig.DO_NOT_TRACK)); builders.put( Names.SEARCH, - new FixedExecutorBuilder(settings, Names.SEARCH, searchOrGetThreadPoolSize, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH, + searchOrGetThreadPoolSize, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.SEARCH_WORKER, @@ -230,7 +268,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex builders.put( Names.SEARCH_COORDINATION, - new FixedExecutorBuilder(settings, Names.SEARCH_COORDINATION, halfProc, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH_COORDINATION, + halfProc, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.AUTO_COMPLETE, @@ -310,6 +354,7 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex threadContext = new ThreadContext(settings); + // Now that all the thread pools have been defined, actually build them.
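For readers less familiar with the executor flavors the new javadocs contrast, the fixed/scaling/direct distinction maps onto plain JDK constructs. A rough illustration using JDK types only (assumed equivalents for exposition, not the actual `EsExecutors` implementations, which use custom queues and rejection handling):

```java
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class ExecutorShapes {

    /** Fixed: a constant number of threads; excess work waits in the queue. */
    static ExecutorService fixed(int threads) {
        return Executors.newFixedThreadPool(threads);
    }

    /**
     * Scaling: starts at {@code core} threads, grows to {@code max} under bursts,
     * and idle threads above the core count die once the keep-alive expires.
     */
    static ExecutorService scaling(int core, int max, long keepAliveSeconds) {
        return new ThreadPoolExecutor(
            core,
            max,
            keepAliveSeconds,
            TimeUnit.SECONDS,
            new SynchronousQueue<>() // hand-off queue: the pool grows instead of queueing
        );
    }

    /** Direct: runs the task on the calling thread, like the removed "same" pool. */
    static Executor direct() {
        return Runnable::run;
    }
}
```

The removal of `Names.SAME` in the following hunks is the "direct" case: callers that want calling-thread execution now use `EsExecutors.DIRECT_EXECUTOR_SERVICE` directly rather than looking up a pseudo thread pool by name.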
final Map executors = new HashMap<>(); for (final Map.Entry entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); @@ -321,16 +366,10 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex executors.put(entry.getKey(), executorHolder); } - executors.put(Names.SAME, new ExecutorHolder(EsExecutors.DIRECT_EXECUTOR_SERVICE, new Info(Names.SAME, ThreadPoolType.DIRECT))); this.executors = Map.copyOf(executors); this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v))); this.instruments = instruments; - final List infos = executors.values() - .stream() - .filter(holder -> holder.info.getName().equals("same") == false) - .map(holder -> holder.info) - .toList(); - this.threadPoolInfo = new ThreadPoolInfo(infos); + this.threadPoolInfo = new ThreadPoolInfo(executors.values().stream().map(holder -> holder.info).toList()); this.scheduler = Scheduler.initScheduler(settings, "scheduler"); this.slowSchedulerWarnThresholdNanos = SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING.get(settings).nanos(); this.cachedTimeThread = new CachedTimeThread( @@ -467,10 +506,6 @@ public ThreadPoolStats stats() { List stats = new ArrayList<>(); for (ExecutorHolder holder : executors.values()) { final String name = holder.info.getName(); - // no need to have info on "same" thread pool - if ("same".equals(name)) { - continue; - } int threads = -1; int queue = -1; int active = -1; @@ -895,6 +930,11 @@ void check(long newAbsoluteMillis, long newRelativeNanos) { } } + /** + * Holds a thread pool and additional ES information ({@link Info}) about that Java thread pool ({@link ExecutorService}) instance. + * + * See {@link Names} for a list of thread pools, though there can be more dynamically added via plugins. + */ static class ExecutorHolder { private final ExecutorService executor; public final Info info; @@ -910,6 +950,9 @@ ExecutorService executor() { } } + /** + * The settings used to create a Java ExecutorService thread pool. 
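The `searchAutoscalingEWMA` constant threaded into `TaskTrackingConfig` above is, as its name suggests, the smoothing factor of an exponentially weighted moving average of task execution times (assuming the second `TaskTrackingConfig` argument is the EWMA alpha, which the constant's name implies). The recurrence itself is one line; a sketch:

```java
final class Ewma {
    /**
     * EWMA with smoothing factor alpha: avg = alpha * sample + (1 - alpha) * avg.
     * With alpha = 0.1 (the constant above), each sample moves the average 10% of
     * the way toward itself: slow to react, but stable under noisy task timings.
     */
    static double ewma(double[] samples, double alpha) {
        double avg = samples[0];
        for (int i = 1; i < samples.length; i++) {
            avg = alpha * samples[i] + (1 - alpha) * avg;
        }
        return avg;
    }
}
```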
+ */ public static class Info implements Writeable, ToXContentFragment { private final String name; diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec new file mode 100644 index 0000000000000..b99a15507f742 --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -0,0 +1 @@ +org.elasticsearch.index.codec.Elasticsearch814Codec diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 679270e90e894..4bae460e3bce2 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8560001 8.13.0,8595000 8.13.1,8595000 +8.13.2,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index b60066601bf68..61cc2167a9048 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8500010 8.13.0,8503000 8.13.1,8503000 +8.13.2,8503000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e4b821fba7634..b91ea304c5da6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1038,6 +1038,7 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 9ddcf8a596226..67cba13661e34 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -563,7 +563,6 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { responseLatch.await(10, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107043") public void testFailedTasksCount() throws Exception { Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build(); setupTestNodes(settings); @@ -605,14 +604,14 @@ protected NodeResponse nodeOperation(NodeRequest request, Task task) { // Make sure that actions are still registered in the task manager on all nodes // Twice on the coordinating node and once on all other nodes. 
- assertEquals(4, listeners[0].getEvents().size()); - assertEquals(2, listeners[0].getRegistrationEvents().size()); - assertEquals(2, listeners[0].getUnregistrationEvents().size()); - for (int i = 1; i < listeners.length; i++) { - assertEquals(2, listeners[i].getEvents().size()); - assertEquals(1, listeners[i].getRegistrationEvents().size()); - assertEquals(1, listeners[i].getUnregistrationEvents().size()); - } + assertBusy(() -> { + assertEquals(2, listeners[0].getRegistrationEvents().size()); + assertEquals(2, listeners[0].getUnregistrationEvents().size()); + for (int i = 1; i < listeners.length; i++) { + assertEquals(1, listeners[i].getRegistrationEvents().size()); + assertEquals(1, listeners[i].getUnregistrationEvents().size()); + } + }); } private List getAllTaskDescriptions() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 0ee3b244ecf45..adba547f9b2ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -113,7 +113,7 @@ public void testIndexPressureStats() throws Exception { randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats), randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats) ); - long[] expectedStats = new long[12]; + long[] expectedStats = new long[13]; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats indexingPressureStats = nodeStat.getIndexingPressureStats(); if (indexingPressureStats != null) { @@ -130,8 +130,9 @@ public void testIndexPressureStats() throws Exception { expectedStats[8] += indexingPressureStats.getCoordinatingRejections(); expectedStats[9] += indexingPressureStats.getPrimaryRejections(); expectedStats[10] += indexingPressureStats.getReplicaRejections(); + expectedStats[11] += indexingPressureStats.getPrimaryDocumentRejections(); - expectedStats[11] += indexingPressureStats.getMemoryLimit(); + expectedStats[12] += indexingPressureStats.getMemoryLimit(); } } @@ -181,9 +182,12 @@ public void testIndexPressureStats() throws Exception { + "," + "\"replica_rejections\":" + expectedStats[10] + + "," + + "\"primary_document_rejections\":" + + expectedStats[11] + "}," + "\"limit_in_bytes\":" - + expectedStats[11] + + expectedStats[12] + "}" + "}}" ) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 2226c40b618f4..23395556761f1 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexNotFoundException; @@ -48,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; import 
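The switch from fixed assertions to `assertBusy` above accounts for unregistration events arriving asynchronously: the helper retries the assertion until it passes or a deadline expires. A stripped-down sketch of the idea (written from scratch here, a simplification of the actual `ESTestCase` helper):

```java
final class AssertBusy {
    /** Retries the assertion until it passes or the timeout elapses, rethrowing the last failure. */
    static void assertBusy(Runnable assertion, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (true) {
            try {
                assertion.run(); // may throw AssertionError
                return;          // assertion passed
            } catch (AssertionError e) {
                if (System.currentTimeMillis() >= deadline) {
                    throw e;     // out of time: surface the last failure
                }
                Thread.sleep(50); // back off, then re-check
            }
        }
    }
}
```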
org.junit.After; import org.junit.Assume; import org.junit.Before; @@ -843,7 +843,7 @@ private BulkOperation newBulkOperation( return new BulkOperation( null, threadPool, - ThreadPool.Names.SAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, clusterService, request, client, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 3057b00553a22..20d826b11c1e7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.Function; @@ -137,7 +138,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index b97e8303a8eb5..52d50b3a23a0d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -47,9 +47,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.junit.Before; @@ -57,13 +55,18 @@ import org.mockito.Captor; import org.mockito.MockitoAnnotations; -import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -73,6 +76,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -95,6 +99,9 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); private FeatureService mockFeatureService; + private static final ExecutorService writeExecutor = new NamedDirectExecutorService("write"); + private static final ExecutorService systemWriteExecutor = new NamedDirectExecutorService("system_write"); + /** Services needed by bulk action */ TransportService transportService; ClusterService clusterService; @@ -158,7 +165,7 @@ void executeBulk( BulkRequest bulkRequest, long 
startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { @@ -186,13 +193,95 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction shutdownNow() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isShutdown() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isTerminated() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Callable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task, T result) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public List> invokeAll(Collection> tasks) { + return null; + } + + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + } + @Before - public void setupAction() throws IOException { + public void setupAction() { // initialize captors, which must be members to use @Capture because of generics threadPool = mock(ThreadPool.class); + when(threadPool.executor(eq(ThreadPool.Names.WRITE))).thenReturn(writeExecutor); + when(threadPool.executor(eq(ThreadPool.Names.SYSTEM_WRITE))).thenReturn(systemWriteExecutor); MockitoAnnotations.openMocks(this); // setup services that will be called by action - transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); + transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); clusterService = mock(ClusterService.class); localIngest = true; // setup nodes for local and remote @@ -312,7 +401,7 @@ public void testIngestLocal() throws Exception { redirectHandler.capture(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -360,7 +449,7 @@ public void testSingleItemBulkActionIngestLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -408,7 +497,7 @@ public void testIngestSystemLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.SYSTEM_WRITE) + same(systemWriteExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -567,7 +656,7 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest1.getPipeline(), "default_pipeline"); assertEquals(indexRequest2.getPipeline(), "default_pipeline"); @@ -617,7 +706,7 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + 
same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertFalse(action.indexCreated); // still no index yet, the ingest node failed. @@ -713,7 +802,7 @@ public void testFindDefaultPipelineFromTemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -753,7 +842,7 @@ public void testFindDefaultPipelineFromV2TemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -782,7 +871,7 @@ public void testIngestCallbackExceptionHandled() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); indexRequest1.autoGenerateId(); completionHandler.getValue().accept(Thread.currentThread(), null); @@ -821,7 +910,7 @@ private void validateDefaultPipeline(IndexRequest indexRequest) { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(null, exception); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 1a16d9083df55..960397033f602 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -59,12 +59,14 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.bulk.TransportBulkAction.prohibitCustomRoutingOnDataStream; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamServiceTests.createDataStream; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.junit.Assume.assumeThat; import static org.mockito.ArgumentMatchers.any; @@ -321,31 +323,45 @@ public void testOnlySystem() { assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(mixed), indicesLookup, systemIndices)); } - public void testRejectCoordination() throws Exception { + private void blockWriteThreadPool(CountDownLatch blockingLatch) { + assertThat(blockingLatch.getCount(), greaterThan(0L)); + final var executor = threadPool.executor(ThreadPool.Names.WRITE); + // Add tasks repeatedly until we get an EsRejectedExecutionException which indicates that the threadpool and its queue are full. 
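`blockWriteThreadPool` below exploits a basic property of fixed pools: once every thread is busy and the bounded queue is full, further submissions are rejected. The same mechanics can be demonstrated with stock JDK types (the ES helper provokes Elasticsearch's `EsRejectedExecutionException`; the JDK analogue below throws `RejectedExecutionException`):

```java
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class RejectionDemo {
    public static void main(String[] args) {
        // Fixed pool of one thread plus a queue of one slot: the third task must be rejected.
        ExecutorService pool = new ThreadPoolExecutor(1, 1, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1));
        CountDownLatch blockingLatch = new CountDownLatch(1);
        try {
            // Saturate: the first task occupies the only thread, the second fills the queue.
            for (int i = 0; i < 2; i++) {
                pool.execute(() -> {
                    try {
                        blockingLatch.await();
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                });
            }
            try {
                pool.execute(() -> {}); // no thread and no queue slot left
            } catch (RejectedExecutionException expected) {
                System.out.println("rejected, as the test relies on");
            }
        } finally {
            blockingLatch.countDown(); // unblock so the pool can drain
            pool.shutdown();
        }
    }
}
```

Releasing the latch in a finally block, exactly as the tests do, is what lets the saturated pool drain and shut down cleanly.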
+ expectThrows(EsRejectedExecutionException.class, () -> { + // noinspection InfiniteLoopStatement + while (true) { + executor.execute(() -> safeAwait(blockingLatch)); + } + }); + } + + public void testRejectCoordination() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); + final var blockingLatch = new CountDownLatch(1); try { - threadPool.startForcingRejections(); + blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } - public void testRejectionAfterCreateIndexIsPropagated() throws Exception { + public void testRejectionAfterCreateIndexIsPropagated() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); bulkAction.failIndexCreation = randomBoolean(); + final var blockingLatch = new CountDownLatch(1); try { - bulkAction.beforeIndexCreation = threadPool::startForcingRejections; + bulkAction.beforeIndexCreation = () -> blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); assertTrue(bulkAction.indexCreated); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index cb9bdd1f3a827..09513351652b8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -50,6 +50,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Map; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; @@ -140,12 +141,12 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { expected.set(1000000); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } else { @@ -165,13 +166,13 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { long elapsed = spinForAtLeastOneMillisecond(); expected.set(elapsed); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 2657bdef8c09d..fc9e9f05542c9 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -200,7 +200,7 @@ public void onFailure(Exception e) { bulkAction.createMissingIndicesAndIndexData( task, bulkRequest, - randomAlphaOfLength(10), + r -> fail("executor is unused"), listener, indicesToAutoCreate, dataStreamsToRollover, diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 0d1104279d3ce..70e291afcaf32 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -794,6 +794,7 @@ private DataStream createDataStream( null, false, List.of(), + false, autoShardingEvent ); } diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index ed807091ae9a2..8bfd61b8d5b32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ -127,12 +126,6 @@ public SearchTransportService getSearchTransport() { return searchTransport; } - @Override - public ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { - Assert.fail("should not be called"); - return null; - } - @Override public void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPhase) { try { diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 5f24f72d5cc8f..a45730a82dbc2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -97,7 +97,7 @@ public void testFailWithUnknownKey() { ).build(); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("key [unknown_key] is not supported in the metadata section", ex.getMessage()); } @@ -113,7 +113,6 @@ public void testSimpleAddWithCarriageReturn() throws Exception { ).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -137,7 +136,6 @@ public void testDefaultIndicesOptions() throws IOException { ).withParams(Collections.singletonMap("ignore_unavailable", "true")).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -250,7 +248,7 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); 
IllegalArgumentException expectThrows = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("The msearch request must be terminated by a newline [\n]", expectThrows.getMessage()); @@ -261,7 +259,6 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); MultiSearchRequest msearchRequest = RestMultiSearchAction.parseRequest( restRequestWithNewLine, - null, true, new UsageService().getSearchUsageHolder(), nf -> false diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 3187a3e391691..f086b52c1b491 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -96,8 +96,9 @@ protected DataStream mutateInstance(DataStream instance) { var lifecycle = instance.getLifecycle(); var failureStore = instance.isFailureStore(); var failureIndices = instance.getFailureIndices(); + var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - switch (between(0, 11)) { + switch (between(0, 12)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); case 2 -> generation = instance.getGeneration() + randomIntBetween(1, 10); @@ -110,7 +111,11 @@ protected DataStream mutateInstance(DataStream instance) { isHidden = true; } } - case 5 -> isReplicated = isReplicated == false; + case 5 -> { + isReplicated = isReplicated == false; + // Replicated data streams cannot be marked for lazy rollover. + rolloverOnWrite = isReplicated == false && rolloverOnWrite; + } case 6 -> { if (isSystem == false) { isSystem = true; @@ -131,6 +136,10 @@ protected DataStream mutateInstance(DataStream instance) { failureStore = failureIndices.isEmpty() == false; } case 11 -> { + rolloverOnWrite = rolloverOnWrite == false; + isReplicated = rolloverOnWrite == false && isReplicated; + } + case 12 -> { autoShardingEvent = randomBoolean() && autoShardingEvent != null ? null : new DataStreamAutoShardingEvent( @@ -154,6 +163,7 @@ protected DataStream mutateInstance(DataStream instance) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -212,6 +222,7 @@ public void testRolloverUpgradeToTsdbDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -240,6 +251,7 @@ public void testRolloverDowngradeToRegularDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -616,19 +628,21 @@ public void testSnapshot() { postSnapshotIndices.removeAll(indicesToRemove); postSnapshotIndices.addAll(indicesToAdd); + var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = new DataStream( preSnapshotDataStream.getName(), postSnapshotIndices, preSnapshotDataStream.getGeneration() + randomIntBetween(0, 5), preSnapshotDataStream.getMetadata() == null ? 
null : new HashMap<>(preSnapshotDataStream.getMetadata()), preSnapshotDataStream.isHidden(), - preSnapshotDataStream.isReplicated() && randomBoolean(), + replicated, preSnapshotDataStream.isSystem(), preSnapshotDataStream.isAllowCustomRouting(), preSnapshotDataStream.getIndexMode(), preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + replicated == false && preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -670,6 +684,7 @@ public void testSnapshotWithAllBackingIndicesRemoved() { preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -1896,13 +1911,14 @@ private IndexMetadata createIndexMetadata(String indexName, IndexWriteLoad index public void testWriteFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), randomNonEmptyIndexInstances(), randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1910,7 +1926,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(noFailureStoreDataStream.getFailureStoreWriteIndex(), nullValue()); @@ -1921,7 +1937,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1929,7 +1945,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStreamWithEmptyFailureIndices.getFailureStoreWriteIndex(), nullValue()); @@ -1947,7 +1963,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1955,7 +1971,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.getFailureStoreWriteIndex(), is(writeFailureIndex)); @@ -1965,13 +1981,14 @@ public void testIsFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); List backingIndices = randomNonEmptyIndexInstances(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), backingIndices, randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1979,7 +1996,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -1994,7 +2011,7 @@ public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2002,7 +2019,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -2026,7 +2043,7 @@ 
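The test edits in this region all encode a single invariant, stated in the mutation case above: replicated data streams cannot be marked for lazy rollover, so wherever `rolloverOnWrite` is randomized it is masked with `replicated == false`. A toy guard stating the rule (hypothetical types; the real constraint lives in `DataStream` itself, not in a record like this):

```java
/** Toy stand-in for the two DataStream flags these tests keep in sync. */
record ReplicationFlags(boolean replicated, boolean rolloverOnWrite) {
    ReplicationFlags {
        // Mirrors the comment above: "Replicated data streams cannot be marked for lazy rollover."
        if (replicated && rolloverOnWrite) {
            throw new IllegalArgumentException("replicated data streams cannot be marked for lazy rollover");
        }
    }
}
```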
public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2034,7 +2051,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.isFailureStoreIndex(writeFailureIndex.getName()), is(true)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index a1eeceba8a390..2fba37772ef94 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1217,9 +1217,9 @@ public void testHiddenAliasAndHiddenIndexResolution() { indexNames = indexNameExpressionResolver.concreteIndexNames(state, includeHiddenOptions, visibleAlias); assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); - // A total wildcards does not resolve the hidden index in this case + // total wildcards should also resolve both visible and hidden indices if there is a visible alias indexNames = indexNameExpressionResolver.concreteIndexNames(state, excludeHiddenOptions, "*"); - assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex)); + assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); } { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 71306d7fe0aef..1fe1f6350445c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -357,6 +357,7 @@ public void testRemoveBrokenBackingIndexReference() { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), + original.rolloverOnWrite(), original.getAutoShardingEvent() ); var brokenState = ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(broken).build()).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 2406eb8e76ab9..c7a30e3eae548 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -280,6 +280,130 @@ public void testAll() { assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } + public void testAllAliases() { + { + // hidden index with hidden alias should not be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-hidden-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-hidden").isHidden(true)) // alias hidden + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + 
SystemIndexAccessLevel.NONE + ); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + } + + { + // hidden index with visible alias should be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-visible-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-visible").isHidden(false)) // alias visible + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + SystemIndexAccessLevel.NONE + ); + assertThat( + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), + equalTo(newHashSet("index-visible-alias")) + ); + } + } + + public void testAllDataStreams() { + + String dataStreamName = "foo_logs"; + long epochMillis = randomLongBetween(1580536800000L, 1583042400000L); + IndexMetadata firstBackingIndexMetadata = createBackingIndex(dataStreamName, 1, epochMillis).build(); + + IndicesOptions indicesAndAliasesOptions = IndicesOptions.fromOptions( + randomBoolean(), + randomBoolean(), + true, + false, + true, + false, + false, + false + ); + + { + // visible data streams should be returned by _all even though backing indices are hidden + Metadata.Builder mdBuilder = Metadata.builder() + .put(firstBackingIndexMetadata, true) + .put(DataStreamTestHelper.newInstance(dataStreamName, List.of(firstBackingIndexMetadata.getIndex()))); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + indicesAndAliasesOptions, + false, + false, + true, + SystemIndexAccessLevel.NONE, + NONE, + NONE + ); + + assertThat( + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), + equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + ); + } + + { + // if data stream itself is hidden, backing indices should not be returned + boolean hidden = true; + var dataStream = new DataStream( + dataStreamName, + List.of(firstBackingIndexMetadata.getIndex()), + 1, + null, + hidden, + false, + false, + false, + null, + null, + false, + List.of(), + false, + null + ); + + Metadata.Builder mdBuilder = Metadata.builder().put(firstBackingIndexMetadata, true).put(dataStream); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + indicesAndAliasesOptions, + false, + false, + true, + SystemIndexAccessLevel.NONE, + NONE, + NONE + ); + + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + } + } + public void testResolveEmpty() { Metadata.Builder mdBuilder = Metadata.builder() .put( diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 7a3d48aad13d3..bd4aa0241cd27 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -12,10 +12,11 @@ import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import
org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntField; +import org.apache.lucene.document.KeywordField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.TransportVersion; @@ -31,6 +32,7 @@ import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; @@ -43,35 +45,51 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=0), chunkSize=14336, maxDocsPerChunk=128, blockShift=10)", + codec.storedFieldsFormat().toString() + ); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=3), chunkSize=245760, maxDocsPerChunk=2048, blockShift=10)", + codec.storedFieldsFormat().toString() + ); + } + + public void testLegacyDefault() throws Exception { + Codec codec = createCodecService().codec("legacy_default"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } - // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - iw.addDocument(new Document()); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); - ir.close(); - dir.close(); + public void testLegacyBestCompression() throws Exception { + Codec codec = createCodecService().codec("legacy_best_compression"); + assertThat(codec, 
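// [Editor's sketch] The expected strings above pin down the new zstd stored-fields parameters:
// "default" uses ZSTD level 0 with 14336-byte chunks of at most 128 docs, while
// "best_compression" uses ZSTD level 3 with 245760-byte chunks of at most 2048 docs. A round-trip
// check in the same style as the legacy-codec tests in this hunk (all names are taken from this
// diff; assumes a Lucene test case providing newDirectory/newIndexWriterConfig):
try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) {
    Document doc = new Document();
    doc.add(new KeywordField("string_field", "abc", Field.Store.YES));
    doc.add(new IntField("int_field", 42, Field.Store.YES));
    w.addDocument(doc);
    try (DirectoryReader r = DirectoryReader.open(w)) {
        assertEquals(1, r.numDocs()); // the codec can write stored fields and read them back
    }
}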
Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } private CodecService createCodecService() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index 4ce20e35869cb..74657842488b5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -63,7 +62,7 @@ public class PerFieldMapperCodecTests extends ESTestCase { """; public void testUseBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, randomBoolean(), false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -71,7 +70,7 @@ public void testUseBloomFilter() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -79,13 +78,13 @@ public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled_noTimeSeriesMode() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); assertWarnings( @@ -94,28 +93,29 @@ public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() thr } public void 
testUseES87TSDBEncodingForTimestampField() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDoNotUseES87TSDBEncodingForTimestampFieldNonTimeSeriesIndex() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } public void testEnableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDisableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } - private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -140,31 +140,32 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeDisabledCodecDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeAndCodecEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_2); 
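// [Editor's note] The rename running through PerFieldMapperCodecTests: the per-field layer no
// longer takes a stored-fields mode. Compare the two constructor shapes visible in this diff:
//   old: new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE)
//   new: new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE)
// Per-field decisions (postings, doc values, bloom filters) are thus decoupled from stored-fields
// compression, which now belongs to the Codec itself. Sketch of the new construction:
PerFieldFormatSupplier supplier = new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE);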
assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); } - private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -173,7 +174,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..1679813ed1340 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..5acdd4f5730e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index d83c75455292f..726ec8561535e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -317,7 +317,8 @@ public void testDateNanoDocValues() throws IOException { "my_date", IndexNumericFieldData.NumericType.DATE_NANOSECONDS, CoreValuesSourceType.DATE, - DateNanosDocValuesField::new + DateNanosDocValuesField::new, + false ); // Read index and check the doc values DirectoryReader reader = DirectoryReader.open(w); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index 144bfa3e8701e..486b33d9b155a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -464,7 +464,11 @@ public void testDeeplyNestedMapping() throws Exception { threads[threadId] = new Thread(() -> { try { latch.await(); - mapperService.parseMapping("_doc", new CompressedXContent(Strings.toString(builders[threadId]))); + mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(Strings.toString(builders[threadId])) + ); } catch (Exception e) { throw new AssertionError(e); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 329d8a795732f..229e2e6f72cc1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -67,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java 
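// [Editor's sketch] A smaller signature change worth noting from the DynamicFieldsBuilderTests
// hunk above: SourceFieldMapper.Builder now takes a Settings argument. Both call sites updated in
// this diff pass Settings.EMPTY, which keeps the previous behaviour:
SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build();
// Presumably the settings let the builder consult the new lossy-parameters guard exercised in
// SourceFieldMapperTests further down in this diff.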
b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java new file mode 100644 index 0000000000000..8c9197b0f3173 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.test.ESTestCase; + +public class MapperBuilderContextTests extends ESTestCase { + + public void testRoot() { + MapperBuilderContext root = MapperBuilderContext.root(false, false); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(MapperService.MergeReason.MAPPING_UPDATE, root.getMergeReason()); + } + + public void testRootWithMergeReason() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperBuilderContext root = MapperBuilderContext.root(false, false, mergeReason); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(mergeReason, root.getMergeReason()); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java index 9c38487dbdf7b..77d3259ea1091 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java @@ -29,4 +29,10 @@ public void testAddFieldIfPossibleUnlimited() { assertTrue(context.decrementFieldBudgetIfPossible(Integer.MAX_VALUE)); } + public void testMergeReasons() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperMergeContext context = MapperMergeContext.root(false, false, mergeReason, Integer.MAX_VALUE); + assertEquals(mergeReason, context.getMapperBuilderContext().getMergeReason()); + } + } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 7f762bbfa7234..0a49907b25567 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -1707,6 +1707,93 @@ public void testExpandDottedNotationToObjectMappers() throws IOException { }"""); } + public void testMergeDottedAndNestedNotation() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping1, mapping2), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping2, mapping1), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "keyword" + } + } + } + } + } + }"""); + } + + public void testDottedAndNestedNotationInSameMapping() throws 
IOException { + CompressedXContent mapping = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + }, + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + } + private void assertMergeEquals(List mappingSources, String expected) throws IOException { final MapperService mapperServiceBulk = createMapperService(mapping(b -> {})); // simulates multiple component templates being merged in a composable index template diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 61d62c1e41969..25e4ccdf4d3a9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -1515,8 +1515,7 @@ public void testMergeNested() { NestedObjectMapper result = (NestedObjectMapper) firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isIncludeInParent()); assertTrue(result.isIncludeInRoot()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 3c4aca4d36284..94a4c2ea92fbb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -75,10 +75,7 @@ public void testMergeDisabledField() { new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); assertFalse(((ObjectMapper) merged.getMapper("disabled")).isEnabled()); } @@ -93,8 +90,7 @@ public void testMergeEnabled() { ObjectMapper result = rootObjectMapper.merge( mergeWith, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertTrue(result.isEnabled()); } @@ -115,8 +111,7 @@ public void testMergeEnabledForRootMapper() { ObjectMapper result = firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isEnabled()); } @@ -131,10 +126,7 @@ public void testMergeDisabledRootMapper() { Collections.singletonMap("test", new TestRuntimeField("test", "long")) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); 
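// [Editor's note] The API change running through these mapper tests: merge() no longer takes a
// separate MergeReason argument; the reason now travels inside MapperMergeContext. Old vs new
// call shapes, copied from the hunks above:
//   before: mapper.merge(mergeWith, MapperService.MergeReason.INDEX_TEMPLATE, MapperMergeContext.root(false, false, Long.MAX_VALUE));
//   after:  mapper.merge(mergeWith, MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE));
// The reason is then available to builders through the context, as the new tests assert:
MapperMergeContext context = MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE);
assertEquals(MapperService.MergeReason.INDEX_TEMPLATE, context.getMapperBuilderContext().getMergeReason());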
assertFalse(merged.isEnabled()); assertEquals(1, merged.runtimeFields().size()); assertEquals("test", merged.runtimeFields().iterator().next().name()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 74b293ca7d6d6..154132c772927 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -126,6 +126,7 @@ public void testMerge() throws IOException { assertNull(mapper.mapping().getRoot().dynamic()); Mapping mergeWith = mapperService.parseMapping( "_doc", + MergeReason.MAPPING_UPDATE, new CompressedXContent(BytesReference.bytes(topMapping(b -> b.field("dynamic", "strict")))) ); Mapping merged = mapper.mapping().merge(mergeWith, reason, Long.MAX_VALUE); @@ -463,10 +464,14 @@ public void testSubobjectsCannotBeUpdated() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "object"))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { - b.field("type", "object"); - b.field("subobjects", "false"); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { + b.field("type", "object"); + b.field("subobjects", "false"); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) @@ -478,9 +483,13 @@ public void testSubobjectsCannotBeUpdatedOnRoot() throws IOException { MapperService mapperService = createMapperService(topMapping(b -> b.field("subobjects", false))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(topMapping(b -> { - b.field("subobjects", true); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(topMapping(b -> { + b.field("subobjects", true); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 5601290fed5c7..47b8bb3be36b7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -238,4 +239,50 @@ public void testSyntheticSourceInTimeSeries() throws IOException { assertTrue(mapper.sourceMapper().isSynthetic()); assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } 
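// [Editor's note] The test added just below exercises a new guard: when the setting named by
// SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME is false, any _source configuration
// that can drop data (enabled=false, includes, excludes, mode=disabled) is rejected at
// mapping-parse time. The guarding settings are built exactly as the test does:
Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build();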
+ + public void testSupportsNonDefaultParameterValues() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("enabled", false).endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [enabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("includes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [includes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [excludes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [mode=disabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService( + settings, + topMapping( + b -> b.startObject("_source").field("enabled", false).array("includes", "foo").array("excludes", "foo").endObject() + ) + ).documentMapper().sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index e05cc92c8a76b..ec7d0a85f4486 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -20,7 +20,6 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; @@ -65,7 +64,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106964") public class DenseVectorFieldMapperTests extends MapperTestCase { private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0; @@ -81,23 +79,33 @@ public DenseVectorFieldMapperTests() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { - indexMapping(b, true); + indexMapping(b, IndexVersion.current()); } @Override protected void minimalMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { - indexMapping(b, indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION)); + indexMapping(b, indexVersion); } - private void indexMapping(XContentBuilder b, boolean indexedByDefault) throws IOException { + private void 
indexMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { b.field("type", "dense_vector").field("dims", 4); if (elementType != ElementType.FLOAT) { b.field("element_type", elementType.toString()); } - if (indexedByDefault || indexed) { + if (indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION) || indexed) { // Serialize if it's new index version, or it was not the default for previous indices b.field("index", indexed); } + if (indexVersion.onOrAfter(DenseVectorFieldMapper.DEFAULT_TO_INT8) + && indexed + && elementType.equals(ElementType.FLOAT) + && indexOptionsSet == false) { + b.startObject("index_options"); + b.field("type", "int8_hnsw"); + b.field("m", 16); + b.field("ef_construction", 100); + b.endObject(); + } if (indexed) { b.field("similarity", "dot_product"); if (indexOptionsSet) { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 2f31bac135716..3085ff89603ce 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 20d5fdae5e4cf..f11d3f9b70d23 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -242,6 +242,11 @@ public LeafNumericFieldData loadDirect(LeafReaderContext context) throws Excepti protected boolean sortRequiresCustomComparator() { return false; } + + @Override + protected boolean isIndexed() { + return false; + } } private static final ScoreFunction RANDOM_SCORE_FUNCTION = new RandomScoreFunction(0, 0, new IndexFieldDataStub()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 97bf9f4e380fa..c2706a7a3cf22 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -148,7 +148,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -401,19 +400,19 @@ public void testRunUnderPrimaryPermitDelaysToExecutorWhenBlocked() throws Except indexShard.acquireAllPrimaryOperationsPermits(onAcquired, new TimeValue(Long.MAX_VALUE, TimeUnit.NANOSECONDS)); final Releasable permit = onAcquired.actionGet(); final CountDownLatch latch = new CountDownLatch(1); - 
final String executorOnDelay = randomFrom( - ThreadPool.Names.FLUSH, - ThreadPool.Names.GENERIC, - ThreadPool.Names.MANAGEMENT, - ThreadPool.Names.SAME - ); + final String expectedThreadPoolName; + final Executor executorOnDelay; + if (randomBoolean()) { + expectedThreadPoolName = ThreadPool.Names.GENERIC; + executorOnDelay = EsExecutors.DIRECT_EXECUTOR_SERVICE; + } else { + expectedThreadPoolName = randomFrom(ThreadPool.Names.FLUSH, ThreadPool.Names.GENERIC, ThreadPool.Names.MANAGEMENT); + executorOnDelay = threadPool.executor(expectedThreadPoolName); + } indexShard.runUnderPrimaryPermit(() -> { - final String expectedThreadPoolName = executorOnDelay.equals(ThreadPool.Names.SAME) - ? "generic" - : executorOnDelay.toLowerCase(Locale.ROOT); - assertThat(Thread.currentThread().getName(), containsString(expectedThreadPoolName)); + assertThat(Thread.currentThread().getName(), containsString('[' + expectedThreadPoolName + ']')); latch.countDown(); - }, e -> fail(e.toString()), threadPool.executor(executorOnDelay)); + }, e -> fail(e.toString()), executorOnDelay); permit.close(); latch.await(); // we could race and assert on the count before the permit is returned diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index a52fd7e608d24..9b686417badfc 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -254,7 +255,7 @@ public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { IndexService indexService = createIndex("foo"); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> indexService.mapperService().parseMapping("type", new CompressedXContent(mapping)) + () -> indexService.mapperService().parseMapping("type", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), equalTo("Failed to parse mapping: Unknown Similarity type [unknown_similarity] for field [field1]")); } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 9582a6e76d539..084eb94852524 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -219,7 +218,7 @@ public void testExecuteIndexPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1127,7 +1126,7 @@ public String getType() { (slot, 
targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1172,7 +1171,7 @@ public void testExecuteBulkPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, times(1)).accept( argThat(item -> item == 2), @@ -1249,7 +1248,7 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(wrappedObserverWasUsed.get(), equalTo(2)); assertThat(parsedValueWasUsed.get(), equalTo(2)); @@ -1284,7 +1283,7 @@ public void testExecuteSuccess() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1326,7 +1325,7 @@ public void testDynamicTemplates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); latch.await(); assertThat(indexRequest.getDynamicTemplates(), equalTo(Map.of("foo", "bar", "foo.bar", "baz"))); @@ -1356,7 +1355,7 @@ public void testExecuteEmptyPipeline() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1419,7 +1418,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(any(), any()); verify(failureHandler, never()).accept(any(), any()); @@ -1477,7 +1476,7 @@ public void testExecuteFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1535,7 +1534,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1587,7 +1586,7 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1650,7 +1649,7 @@ 
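// [Editor's note] Every IngestServiceTests hunk in this stretch makes the same mechanical swap:
// the bulk-execution entry point now accepts a java.util.concurrent.Executor rather than a
// thread-pool name, so the tests pass EsExecutors.DIRECT_EXECUTOR_SERVICE (which runs tasks on
// the calling thread) where they previously passed ThreadPool.Names.WRITE. Sketch:
Executor executor = EsExecutors.DIRECT_EXECUTOR_SERVICE; // was: a name resolved via ThreadPool.Names.WRITE
executor.execute(() -> {}); // the direct executor runs this synchronously on the calling thread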
public void testBulkRequestExecutionWithFailures() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> e.getCause().equals(error))); @@ -1704,7 +1703,7 @@ public void testExecuteFailureRedirection() throws Exception { redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1761,7 +1760,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1827,7 +1826,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { requestItemRedirectHandler, requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemRedirectHandler, times(numIndexRequests)).apply(anyInt(), anyString(), argThat(e -> e.getCause().equals(error))); @@ -1888,7 +1887,7 @@ public void testBulkRequestExecution() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, never()).accept(any(), any()); @@ -2003,7 +2002,7 @@ public String execute() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); { @@ -2083,7 +2082,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.pipelineStats().size(), equalTo(2)); @@ -2109,7 +2108,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.pipelineStats().size(), equalTo(2)); @@ -2140,7 +2139,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.pipelineStats().size(), equalTo(2)); @@ -2172,7 +2171,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.pipelineStats().size(), equalTo(2)); @@ -2269,7 +2268,7 @@ public 
String getDescription() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -2359,7 +2358,7 @@ public void testCBORParsing() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); } @@ -2439,7 +2438,7 @@ public void testSetsRawTimestamp() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(indexRequest1.getRawTimestamp(), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 327dc3d4f5fd0..0e4818701c5f5 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -227,7 +227,7 @@ private Repository createRepository() { new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually } }; diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java index f2a11336c7f4b..15e1d479ddf9a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; @@ -30,16 +29,9 @@ public final class RestMultiSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); - private RestMultiSearchAction action; - @Before public void setUpAction() { - action = new RestMultiSearchAction( - Settings.EMPTY, - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestMultiSearchAction action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 5f641ef8fd84f..77cc94c44e151 100644 --- 
a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -37,7 +36,7 @@ public final class RestSearchActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class), nf -> false); + action = new RestSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 6ac538f6c7ce9..96ad3cd5afb22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -163,7 +163,8 @@ private void testCase( "price", IndexNumericFieldData.NumericType.DOUBLE, CoreValuesSourceType.NUMERIC, - (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n) + (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n), + false ); FunctionScoreQuery query = new FunctionScoreQuery( new MatchAllDocsQuery(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index a5371e7b0b00a..39e73837c83ea 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -65,7 +65,7 @@ public void testDocValueFetcher() throws IOException { when(fieldType.valueFetcher(any(), any())).thenReturn( new DocValueFetcher( DocValueFormat.RAW, - new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null) + new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null, false) ) ); when(sec.getFieldType(any())).thenReturn(fieldType); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index dafe994b502f0..185f4582e7377 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2130,7 +2130,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { recoverySettings ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo in the test 
thread } } diff --git a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java index 40115e1402495..4f7d900f7cdb8 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java @@ -20,8 +20,7 @@ protected final ThreadPool.Info info(final ThreadPool threadPool, final String n return info; } } - assert "same".equals(name); - return null; + return fail(null, "unknown threadpool name: " + name); } protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) { @@ -30,10 +29,10 @@ protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final S return stats; } } - throw new IllegalArgumentException(name); + return fail(null, "unknown threadpool name: " + name); } - protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) throws InterruptedException { + protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) { if (threadPool != null) { terminate(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 5644e0b613651..b68f3ef76bbac 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -29,7 +29,7 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { - public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { + public void testCorrectThreadPoolTypePermittedInSettings() { String threadPoolName = randomThreadPoolName(); ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; @@ -41,13 +41,7 @@ public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedExc .build(), MeterRegistry.NOOP ); - ThreadPool.Info info = info(threadPool, threadPoolName); - if (ThreadPool.Names.SAME.equals(threadPoolName)) { - assertNull(info); // we don't report on the "same" thread pool - } else { - // otherwise check we have the expected type - assertEquals(info.getThreadPoolType(), correctThreadPoolType); - } + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), correctThreadPoolType); } finally { terminateThreadPoolIfNeeded(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java index 87fbf113fc1c9..062cc71c9172d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java @@ -72,13 +72,13 @@ public void testHandlersCompleteAtShutdown() throws Exception { while (keepGoing.get() && requestPermits.tryAcquire()) { nodeB.transportService.sendRequest( randomFrom(random, nodeA, nodeB).transportService.getLocalNode(), - TestNode.ACTION_NAME_PREFIX + randomFrom(random, TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random), TransportRequest.Empty.INSTANCE, new TransportResponseHandler() { final AtomicBoolean completed = new AtomicBoolean(); - final String executor = randomFrom(random, TestNode.EXECUTOR_NAMES); + final Executor 
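// [Editor's note] The thread-pool test changes here all follow from retiring ThreadPool.Names.SAME:
// with no "same" pool left to special-case, info() and stats() now fail loudly on unknown names,
// and callers that want calling-thread execution ask for the direct executor explicitly. A hedged
// sketch of the pattern the tests converge on (useCallingThread is a stand-in condition):
Executor executor = useCallingThread
    ? EsExecutors.DIRECT_EXECUTOR_SERVICE            // replaces the old Names.SAME behaviour
    : threadPool.executor(ThreadPool.Names.GENERIC); // real pools are still looked up by name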
executor = nodeB.randomExecutor(); @Override public void handleResponse(TransportResponse.Empty response) { @@ -99,7 +99,7 @@ public TransportResponse.Empty read(StreamInput in) { @Override public Executor executor() { - return nodeB.transportService.getThreadPool().executor(executor); + return executor; } } ); @@ -130,7 +130,7 @@ public void testInternalSendExceptionForksToHandlerExecutor() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future, unusedReader(), deterministicTaskQueue::scheduleNow) @@ -149,7 +149,7 @@ public void testInternalSendExceptionForksToGenericIfHandlerDoesNotFork() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -178,7 +178,7 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { try { nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -197,14 +197,14 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { public void testInternalSendExceptionCompletesHandlerOnCallingThreadIfTransportServiceClosed() { final var nodeA = new TestNode("node-A"); - final var executor = nodeA.threadPool.executor(randomFrom(TestNode.EXECUTOR_NAMES)); + final var executor = nodeA.randomExecutor(); nodeA.close(); final var testThread = Thread.currentThread(); final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -229,6 +229,7 @@ private static E getSendRequestException(Future future, } private static class Executors { + static final String DIRECT = "direct"; static final String SCALING_DROP_ON_SHUTDOWN = "scaling-drop-on-shutdown"; static final String SCALING_REJECT_ON_SHUTDOWN = "scaling-reject-on-shutdown"; static final String FIXED_BOUNDED_QUEUE = "fixed-bounded-queue"; @@ -238,8 +239,9 @@ private static class Executors { private static class TestNode implements Releasable { static final String ACTION_NAME_PREFIX = "internal:test/"; + static final String[] EXECUTOR_NAMES = new String[] { - ThreadPool.Names.SAME, + Executors.DIRECT, Executors.SCALING_DROP_ON_SHUTDOWN, Executors.SCALING_REJECT_ON_SHUTDOWN, Executors.FIXED_BOUNDED_QUEUE, @@ -293,10 +295,10 @@ public ExecutorService executor(String name) { null, emptySet() ); - for (final var executor : EXECUTOR_NAMES) { + for (final var executorName : EXECUTOR_NAMES) { transportService.registerRequestHandler( - ACTION_NAME_PREFIX + executor, - threadPool.executor(executor), + ACTION_NAME_PREFIX + executorName, + getExecutor(executorName), TransportRequest.Empty::new, (request, 
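// [Editor's note] In the handler-registration loop being rewritten here, each test action name is
// derived from an executor name, and the new getExecutor/randomExecutor/randomActionName helpers
// (added just below) map the synthetic "direct" name to EsExecutors.DIRECT_EXECUTOR_SERVICE while
// all other names resolve through the node's real ThreadPool:
Executor executor = executorName.equals(Executors.DIRECT)
    ? EsExecutors.DIRECT_EXECUTOR_SERVICE
    : threadPool.executor(executorName); // mirrors TestNode.getExecutor in this diff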
channel, task) -> { if (randomBoolean()) { @@ -311,6 +313,18 @@ public ExecutorService executor(String name) { transportService.acceptIncomingRequests(); } + Executor getExecutor(String executorName) { + return executorName.equals(Executors.DIRECT) ? EsExecutors.DIRECT_EXECUTOR_SERVICE : threadPool.executor(executorName); + } + + Executor randomExecutor() { + return getExecutor(randomFrom(TestNode.EXECUTOR_NAMES)); + } + + static String randomActionName(Random random) { + return ACTION_NAME_PREFIX + randomFrom(random, EXECUTOR_NAMES); + } + @Override public void close() { transportService.stop(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 2980b8a48636a..c83caa617e16e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -143,6 +143,7 @@ public static DataStream newInstance( lifecycle, false, List.of(), + false, autoShardingEvent ); } @@ -169,6 +170,7 @@ public static DataStream newInstance( lifecycle, failureStores.size() > 0, failureStores, + false, null ); } @@ -352,13 +354,14 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time ); } + boolean replicated = randomBoolean(); return new DataStream( dataStreamName, indices, generation, metadata, randomBoolean(), - randomBoolean(), + replicated, false, // Some tests don't work well with system data streams, since these data streams require special handling timeProvider, randomBoolean(), @@ -366,7 +369,7 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time randomBoolean() ? DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() : null, failureStore, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), randomBoolean() ? new DataStreamAutoShardingEvent( indices.get(indices.size() - 1).getName(), diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java index e89a6c8a84bf7..1fac5a9917807 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java @@ -379,7 +379,7 @@ public ExecutorService generic() { @Override public ExecutorService executor(String name) { - return Names.SAME.equals(name) ? 
EsExecutors.DIRECT_EXECUTOR_SERVICE : forkingExecutor; + return forkingExecutor; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 09c6eed08bf28..620db8dc83510 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -43,6 +42,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.codec.PerFieldMapperCodec; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -243,7 +243,7 @@ protected static void withLuceneIndex( CheckedConsumer test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 77391aadaa554..1b00ba3e9fd09 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -120,7 +121,7 @@ public final void testUnsupportedParametersAreRejected() throws IOException { + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) ); assertEquals( "Failed to parse mapping: unknown parameter [anything] on metadata field [" + fieldName() + "]", @@ -136,7 +137,7 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { String mappingAsString = "{\n" + " \"_doc\" : {\n" + " \"" + fieldName() + "\" : {\n" + " }\n" + " }\n" + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new 
CompressedXContent(mappingAsString)) ); assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } @@ -161,7 +162,7 @@ public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); } } @@ -184,7 +185,7 @@ public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java index ce8e3a2574f3e..e2fa31c31a46f 100644 --- a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java @@ -9,23 +9,14 @@ package org.elasticsearch.threadpool; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.node.Node; import org.elasticsearch.telemetry.metric.MeterRegistry; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TestThreadPool extends ThreadPool implements Releasable { - private final CountDownLatch blockingLatch = new CountDownLatch(1); - private volatile boolean returnRejectingExecutor = false; - private volatile ThreadPoolExecutor rejectingExecutor; - public TestThreadPool(String name, ExecutorBuilder... customBuilders) { this(name, Settings.EMPTY, customBuilders); } @@ -34,74 +25,6 @@ public TestThreadPool(String name, Settings settings, ExecutorBuilder... 
cust super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), MeterRegistry.NOOP, customBuilders); } - @Override - public ExecutorService executor(String name) { - if (returnRejectingExecutor) { - return rejectingExecutor; - } else { - return super.executor(name); - } - } - - public void startForcingRejections() { - if (rejectingExecutor == null) { - createRejectingExecutor(); - } - returnRejectingExecutor = true; - } - - public void stopForcingRejections() { - returnRejectingExecutor = false; - } - - @Override - public void shutdown() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdown(); - } - super.shutdown(); - } - - @Override - public void shutdownNow() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdownNow(); - } - super.shutdownNow(); - } - - private synchronized void createRejectingExecutor() { - if (rejectingExecutor != null) { - return; - } - ThreadFactory factory = EsExecutors.daemonThreadFactory("reject_thread"); - rejectingExecutor = EsExecutors.newFixed( - "rejecting", - 1, - 0, - factory, - getThreadContext(), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - - CountDownLatch startedLatch = new CountDownLatch(1); - rejectingExecutor.execute(() -> { - try { - startedLatch.countDown(); - blockingLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); - try { - startedLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - @Override public void close() { ThreadPool.terminate(this, 10, TimeUnit.SECONDS); diff --git a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java index f4677dc603e64..0e79dfa6e1e79 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java @@ -443,20 +443,4 @@ public void testThreadPoolSchedulesPeriodicFutureTasks() { assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); } - public void testSameExecutor() { - final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); - final ThreadPool threadPool = taskQueue.getThreadPool(); - final AtomicBoolean executed = new AtomicBoolean(false); - final AtomicBoolean executedNested = new AtomicBoolean(false); - threadPool.generic().execute(() -> { - final var executor = threadPool.executor(ThreadPool.Names.SAME); - assertSame(EsExecutors.DIRECT_EXECUTOR_SERVICE, executor); - executor.execute(() -> assertTrue(executedNested.compareAndSet(false, true))); - assertThat(executedNested.get(), is(true)); - assertTrue(executed.compareAndSet(false, true)); - }); - taskQueue.runAllRunnableTasks(); - assertThat(executed.get(), is(true)); - } - } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index b09250e1527f3..8a6dada181c6c 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -149,8 +149,9 @@ public boolean isError() { /** * Parses the response body and extracts a 
specific value from it (identified by the provided path) */ - public Object evaluate(String path) throws IOException { - return evaluate(path, Stash.EMPTY); + @SuppressWarnings("unchecked") + public <T> T evaluate(String path) throws IOException { + return (T) evaluate(path, Stash.EMPTY); } /** diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 5ac83f94f6248..a32679d445629 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -469,8 +469,7 @@ static String readOsFromNodesInfo(RestClient restClient) throws IOException { ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); SortedSet<String> osPrettyNames = new TreeSet<>(); - @SuppressWarnings("unchecked") - final Map<String, Object> nodes = (Map<String, Object>) restTestResponse.evaluate("nodes"); + final Map<String, Object> nodes = restTestResponse.evaluate("nodes"); for (Entry<String, Object> node : nodes.entrySet()) { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 3e321d57d877c..0f732d2017c74 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -439,44 +439,6 @@ public InternalMultiTerms create(List<Bucket> buckets) { ); } - /** - * Checks if any keys need to be promoted to double from long or unsigned_long - */ - private boolean[] needsPromotionToDouble(List<InternalAggregation> aggregations) { - if (aggregations.size() < 2) { - return null; - } - boolean[] promotions = null; - - for (int i = 0; i < keyConverters.size(); i++) { - boolean hasLong = false; - boolean hasUnsignedLong = false; - boolean hasDouble = false; - boolean hasNonNumber = false; - for (InternalAggregation aggregation : aggregations) { - InternalMultiTerms agg = (InternalMultiTerms) aggregation; - KeyConverter keyConverter = agg.keyConverters.get(i); - switch (keyConverter) { - case DOUBLE -> hasDouble = true; - case LONG -> hasLong = true; - case UNSIGNED_LONG -> hasUnsignedLong = true; - default -> hasNonNumber = true; - } - } - if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { - throw AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); - } - // Promotion to double is required if at least 2 of these 3 conditions are true. - if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ?
1 : 0) > 1) { - if (promotions == null) { - promotions = new boolean[keyConverters.size()]; - } - promotions[i] = true; - } - } - return promotions; - } - private InternalAggregation promoteToDouble(InternalAggregation aggregation, boolean[] needsPromotion) { InternalMultiTerms multiTerms = (InternalMultiTerms) aggregation; List multiTermsBuckets = multiTerms.getBuckets(); @@ -539,33 +501,78 @@ private InternalAggregation promoteToDouble(InternalAggregation aggregation, boo ); } - public List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { - if (needsPromotionToDouble != null) { - List newAggs = new ArrayList<>(aggregations.size()); - for (InternalAggregation agg : aggregations) { - newAggs.add(promoteToDouble(agg, needsPromotionToDouble)); - } - return newAggs; - } else { - return aggregations; - } - } - @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); + private List aggregations = new ArrayList<>(size); @Override public void accept(InternalAggregation aggregation) { aggregations.add(aggregation); } + private List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + return aggregations; + } + + /** + * Checks if any keys need to be promoted to double from long or unsigned_long + */ + private boolean[] needsPromotionToDouble(List aggregations) { + if (aggregations.size() < 2) { + return null; + } + boolean[] promotions = null; + + for (int i = 0; i < keyConverters.size(); i++) { + boolean hasLong = false; + boolean hasUnsignedLong = false; + boolean hasDouble = false; + boolean hasNonNumber = false; + for (InternalAggregation aggregation : aggregations) { + InternalMultiTerms agg = (InternalMultiTerms) aggregation; + KeyConverter keyConverter = agg.keyConverters.get(i); + switch (keyConverter) { + case DOUBLE -> hasDouble = true; + case LONG -> hasLong = true; + case UNSIGNED_LONG -> hasUnsignedLong = true; + default -> hasNonNumber = true; + } + } + if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { + throw AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); + } + // Promotion to double is required if at least 2 of these 3 conditions are true. + if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ? 
1 : 0) > 1) { + if (promotions == null) { + promotions = new boolean[keyConverters.size()]; + } + promotions[i] = true; + } + } + return promotions; + } + @Override public InternalAggregation get() { - List processed = getProcessedAggs(aggregations, needsPromotionToDouble(aggregations)); - return ((AbstractInternalTerms) processed.get(0)).doReduce(processed, reduceContext); + final boolean[] needsPromotionToDouble = needsPromotionToDouble(aggregations); + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java index c551312f68c0b..b719d4ca3bf82 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java @@ -57,7 +57,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), + new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestGetAsyncSearchAction(), new RestGetAsyncStatusAction(), new RestDeleteAsyncSearchAction() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java index d98677d456b90..bd09d8f7740a1 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.search; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -37,16 +36,10 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler { static final Set RESPONSE_PARAMS = Collections.singleton(TYPED_KEYS_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSubmitAsyncSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -69,15 +62,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli // them as supported. We rely on SubmitAsyncSearchRequest#validate to fail in case they are set. // Note that ccs_minimize_roundtrips is also set this way, which is a supported option. 
request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - submit.getSearchRequest(), - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(submit.getSearchRequest(), request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); if (request.hasParam("wait_for_completion_timeout")) { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java index fe6ed8b57d1e0..cc1d4f4e6270d 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java @@ -8,7 +8,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; @@ -27,19 +26,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.mock; public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { - private RestSubmitAsyncSearchAction action; - @Before public void setUpAction() { - action = new RestSubmitAsyncSearchAction( - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestSubmitAsyncSearchAction action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 446e9abcd3e26..b3c059e933fcf 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -341,6 +341,9 @@ static DataStream updateLocalDataStream( remoteDataStream.getLifecycle(), remoteDataStream.isFailureStore(), remoteDataStream.getFailureIndices(), + // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense + // (and potentially even break things). 
+ false, remoteDataStream.getAutoShardingEvent() ); } else { @@ -395,6 +398,7 @@ static DataStream updateLocalDataStream( localDataStream.getLifecycle(), localDataStream.isFailureStore(), localDataStream.getFailureIndices(), + localDataStream.rolloverOnWrite(), localDataStream.getAutoShardingEvent() ); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java index bc97623c76970..c1b4a4bf27890 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java @@ -122,19 +122,21 @@ public void testAction() throws Exception { indices.add(index); } boolean systemDataStream = randomBoolean(); + boolean replicated = randomBoolean(); DataStream dataStream = new DataStream( randomAlphaOfLength(50), indices, randomLongBetween(0, 1000), Map.of(), systemDataStream || randomBoolean(), - randomBoolean(), + replicated, systemDataStream, randomBoolean(), IndexMode.STANDARD, lifecycle, false, List.of(), + replicated == false && randomBoolean(), null ); dataStreamMap.put(dataStream.getName(), dataStream); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java index 58e7be10cfa81..b0a162eb3ec14 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java @@ -17,7 +17,7 @@ public class GetBasicStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartBasic; + private final boolean eligibleToStartBasic; GetBasicStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java index 9126d22f33250..5bceab8edbe92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java @@ -17,7 +17,7 @@ public class GetTrialStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartTrial; + private final boolean eligibleToStartTrial; GetTrialStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java index 853c3d39e4121..87b49f3ef9e82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java @@ -48,9 +48,9 @@ RestStatus getRestStatus() { } - private Status status; - private Map acknowledgeMessages; - private String acknowledgeMessage; + private final Status status; + private final Map acknowledgeMessages; + private final String acknowledgeMessage; PostStartTrialResponse(StreamInput in) throws IOException { super(in); 
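The data-stream hunks above (TransportPutFollowAction, DataStreamLifecycleUsageTransportActionIT, and DataStreamTestHelper earlier) all encode the same invariant: a replicated (follower) data stream never carries rolloverOnWrite == true, since a follower cannot be rolled over locally. A minimal sketch of that invariant as a standalone check; the class and method names here are hypothetical and do not exist in the codebase:

    // Hypothetical helper mirroring the rule applied in this change:
    // DataStreamTestHelper randomizes the flag as `replicated == false && randomBoolean()`,
    // and TransportPutFollowAction hard-codes `false` for the follower copy.
    final class RolloverOnWriteInvariant {
        static boolean rolloverOnWrite(boolean replicated, boolean requested) {
            // The flag may only be true for a non-replicated data stream,
            // because replicated data streams cannot be rolled over on write.
            return replicated == false && requested;
        }
    }
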
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 34126064997d6..5ba0e584d63bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -38,11 +38,11 @@ public class XPackInfoResponse extends ActionResponse implements ToXContentObjec // TODO move this constant to License.java once we move License.java to the protocol jar @Nullable - private BuildInfo buildInfo; + private final BuildInfo buildInfo; @Nullable - private LicenseInfo licenseInfo; + private final LicenseInfo licenseInfo; @Nullable - private FeatureSetsInfo featureSetsInfo; + private final FeatureSetsInfo featureSetsInfo; public XPackInfoResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java index 5bf5ecb445c57..ce872b1e406ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; @@ -23,8 +22,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; - /** * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects * (nodes and edges in common graph parlance). 
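From here on, most hunks apply one recurring refactor: ActionResponse subclasses that are, presumably, no longer deserialized from the wire lose their StreamInput constructors (and the private setters those constructors relied on), and their fields become final; writeTo is kept so the responses can still be serialized outbound. A minimal sketch of the resulting shape, assuming purely local construction; ExampleResponse is a made-up stand-in for the real classes:

    import org.elasticsearch.action.ActionResponse;
    import org.elasticsearch.common.io.stream.StreamOutput;

    import java.io.IOException;

    // Hypothetical response illustrating the pattern: only the programmatic
    // constructor remains, the field is final, and writeTo() still serializes
    // the response for the transport layer.
    public class ExampleResponse extends ActionResponse {
        private final boolean created;

        public ExampleResponse(boolean created) {
            this.created = created;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeBoolean(created);
        }
    }
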
@@ -43,40 +40,6 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb public GraphExploreResponse() {} - public GraphExploreResponse(StreamInput in) throws IOException { - super(in); - tookInMillis = in.readVLong(); - timedOut = in.readBoolean(); - - int size = in.readVInt(); - if (size == 0) { - shardFailures = ShardSearchFailure.EMPTY_ARRAY; - } else { - shardFailures = new ShardSearchFailure[size]; - for (int i = 0; i < shardFailures.length; i++) { - shardFailures[i] = readShardSearchFailure(in); - } - } - // read vertices - size = in.readVInt(); - vertices = new HashMap<>(); - for (int i = 0; i < size; i++) { - Vertex n = Vertex.readFrom(in); - vertices.put(n.getId(), n); - } - - size = in.readVInt(); - - connections = new HashMap<>(); - for (int i = 0; i < size; i++) { - Connection e = new Connection(in, vertices); - connections.put(e.getId(), e); - } - - returnDetailedInfo = in.readBoolean(); - - } - public GraphExploreResponse( long tookInMillis, boolean timedOut, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java index c6d673aec7d2a..ffeb0867723e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,13 +28,6 @@ public DeleteWatchResponse(String id, long version, boolean found) { this.found = found; } - public DeleteWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - found = in.readBoolean(); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java index 5c1f53bef3ef0..3bc3ebbd6f6f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -7,9 +7,7 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -18,22 +16,11 @@ public class PutWatchResponse extends ActionResponse implements ToXContentObject { - private String id; - private long version; - private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; - private boolean created; - - public PutWatchResponse() {} - - public PutWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - seqNo = in.readZLong(); - primaryTerm = in.readVLong(); - created = in.readBoolean(); - } + private final String id; + private 
final long version; + private final long seqNo; + private final long primaryTerm; + private final boolean created; public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, boolean created) { this.id = id; @@ -43,26 +30,6 @@ public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, b this.created = created; } - private void setId(String id) { - this.id = id; - } - - private void setVersion(long version) { - this.version = version; - } - - private void setSeqNo(long seqNo) { - this.seqNo = seqNo; - } - - private void setPrimaryTerm(long primaryTerm) { - this.primaryTerm = primaryTerm; - } - - private void setCreated(boolean created) { - this.created = created; - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java index 5d066a4dc6c50..66037054da685 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; @@ -15,12 +14,7 @@ public class XPackInfoFeatureResponse extends ActionResponse { - private FeatureSet info; - - public XPackInfoFeatureResponse(StreamInput in) throws IOException { - super(in); - info = new FeatureSet(in); - } + private final FeatureSet info; public XPackInfoFeatureResponse(FeatureSet info) { this.info = info; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java index 33dec1371dc86..71bb9993f3a29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java @@ -15,7 +15,7 @@ public class XPackUsageFeatureResponse extends ActionResponse { - private XPackFeatureSet.Usage usage; + private final XPackFeatureSet.Usage usage; public XPackUsageFeatureResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java index 97c7d6d8cb60d..755851b2ec88c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java @@ -30,7 +30,7 @@ public class ExplainLifecycleResponse extends ActionResponse implements ToXConte public static final ParseField INDICES_FIELD = new ParseField("indices"); - private Map indexResponses; + private final Map indexResponses; public ExplainLifecycleResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java index 
9c07db9841e23..95a1bf8493e42 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; - /** * Enum representing the different modes that Index Lifecycle Service can operate in. */ @@ -24,7 +22,7 @@ public boolean isValidChange(OperationMode nextMode) { }, /** - * this represents a state where only sensitive actions (like {@link ShrinkAction}) will be executed + * this represents a state where only sensitive actions (like {@link ShrinkStep}) will be executed * until they finish, at which point the operation mode will move to STOPPED. */ STOPPING { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java index 97d1fbf524963..d40220db794b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -40,7 +40,7 @@ protected GetLifecycleAction() { public static class Response extends ActionResponse implements ChunkedToXContentObject { - private List policies; + private final List policies; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java index f70510de382a9..c3022adb2f60a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java @@ -29,7 +29,7 @@ protected GetStatusAction() { public static class Response extends ActionResponse implements ToXContentObject { - private OperationMode mode; + private final OperationMode mode; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java index 68537fba3bfd1..e1171d9ab7dd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java @@ -47,7 +47,7 @@ public static class Response extends ActionResponse implements ToXContentObject PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), HAS_FAILURES_FIELD); } - private List failedIndexes; + private final List failedIndexes; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index 5cc1c98c6d89b..4617d1f6bccaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { public ActionRequestValidationException validate() { ActionRequestValidationException validationException = new ActionRequestValidationException(); if (MlStrings.isValidId(this.inferenceEntityId) == false) { - validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.inferenceEntityId)); + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "inference_id", this.inferenceEntityId)); } if (validationException.validationErrors().isEmpty() == false) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java index 298b6e71fc855..9a350c3c68adb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java @@ -188,8 +188,8 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, public static class Response extends ActionResponse implements ToXContentObject { - private String evaluationName; - private List metrics; + private final String evaluationName; + private final List metrics; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 1f1eb69ce606c..94bb7047bfe23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -47,7 +47,7 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse implements ToXContentObject { - private Map info; + private final Map info; public Response(Map info) { this.info = info; @@ -57,11 +57,6 @@ public Response() { this.info = Collections.emptyMap(); } - public Response(StreamInput in) throws IOException { - super(in); - info = in.readGenericMap(); - } - public Map getInfo() { return info; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java index 6ca201fd8034a..d58b699fb6555 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -120,12 +120,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private List scheduledEvents; - - public Response(StreamInput in) throws IOException { - super(in); - in.readCollectionAsList(ScheduledEvent::new); - } + private final List scheduledEvents; public Response(List scheduledEvents) { this.scheduledEvents = scheduledEvents; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index 
d03a6d5c0c7c5..7988f885a27da 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -298,11 +298,6 @@ public static class Response extends ActionResponse implements ToXContentObject private final BytesReference preview; - public Response(StreamInput in) throws IOException { - super(in); - preview = in.readBytesReference(); - } - public Response(BytesReference preview) { this.preview = preview; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java index 755e610c4000c..eab89bb86022a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java @@ -117,12 +117,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private Calendar calendar; - - public Response(StreamInput in) throws IOException { - super(in); - calendar = new Calendar(in); - } + private final Calendar calendar; public Response(Calendar calendar) { this.calendar = calendar; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index c9da8aa4dd579..fe26cdb0377fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -156,14 +156,12 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DataFrameAnalyticsConfig config; + private final DataFrameAnalyticsConfig config; public Response(DataFrameAnalyticsConfig config) { this.config = config; } - Response() {} - public Response(StreamInput in) throws IOException { super(in); config = new DataFrameAnalyticsConfig(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 67b1b2f9087e3..c234b24be5a9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -83,7 +83,7 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DatafeedConfig datafeed; + private final DatafeedConfig datafeed; public Response(DatafeedConfig datafeed) { this.datafeed = datafeed; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 50216b72f20d6..5b8dae53840b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -99,9 +99,7 @@ public boolean equals(Object obj) { public static class Response extends 
ActionResponse implements ToXContentObject { - private MlFilter filter; - - Response() {} + private final MlFilter filter; Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java index dd356b8ab41ff..a5c8e10496b3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java @@ -30,12 +30,10 @@ public final class DelegatePkiAuthenticationResponse extends ActionResponse impl private static final ParseField EXPIRES_IN_FIELD = new ParseField("expires_in"); private static final ParseField AUTHENTICATION = new ParseField("authentication"); - private String accessToken; - private TimeValue expiresIn; + private final String accessToken; + private final TimeValue expiresIn; private Authentication authentication; - DelegatePkiAuthenticationResponse() {} - public DelegatePkiAuthenticationResponse(String accessToken, TimeValue expiresIn, Authentication authentication) { this.accessToken = Objects.requireNonNull(accessToken); // always store expiration in seconds because this is how we "serialize" to JSON and we need to parse back diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java index a1ed1c6092df8..334b395a05b31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ToXContentObject; @@ -24,11 +23,6 @@ public UpdateApiKeyResponse(boolean updated) { this.updated = updated; } - public UpdateApiKeyResponse(StreamInput in) throws IOException { - super(in); - this.updated = in.readBoolean(); - } - public boolean isUpdated() { return updated; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java index 936a2892a6dbe..92b27826e8759 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -16,11 +15,11 @@ import java.io.IOException; public class 
OpenIdConnectAuthenticateResponse extends ActionResponse { - private String principal; - private String accessTokenString; - private String refreshTokenString; - private TimeValue expiresIn; - private Authentication authentication; + private final String principal; + private final String accessTokenString; + private final String refreshTokenString; + private final TimeValue expiresIn; + private final Authentication authentication; public OpenIdConnectAuthenticateResponse( Authentication authentication, @@ -36,17 +35,6 @@ public OpenIdConnectAuthenticateResponse( this.authentication = authentication; } - public OpenIdConnectAuthenticateResponse(StreamInput in) throws IOException { - super(in); - principal = in.readString(); - accessTokenString = in.readString(); - refreshTokenString = in.readString(); - expiresIn = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } - } - public String getPrincipal() { return principal; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java index 3dbfccf418c10..681ba15896778 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java @@ -7,19 +7,13 @@ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; public final class OpenIdConnectLogoutResponse extends ActionResponse { - private String endSessionUrl; - - public OpenIdConnectLogoutResponse(StreamInput in) throws IOException { - super(in); - this.endSessionUrl = in.readString(); - } + private final String endSessionUrl; public OpenIdConnectLogoutResponse(String endSessionUrl) { this.endSessionUrl = endSessionUrl; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index 88d8de80fe7a1..5dcfadd3dd01c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,19 +20,19 @@ */ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse implements ToXContentObject { - private String authenticationRequestUrl; + private final String authenticationRequestUrl; /* * The oAuth2 state parameter used for CSRF protection. */ - private String state; + private final String state; /* * String value used to associate a Client session with an ID Token, and to mitigate replay attacks. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java
index 88d8de80fe7a1..5dcfadd3dd01c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java
@@ -8,7 +8,6 @@

 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -21,19 +20,19 @@
  */
 public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse implements ToXContentObject {

-    private String authenticationRequestUrl;
+    private final String authenticationRequestUrl;
     /*
      * The oAuth2 state parameter used for CSRF protection.
      */
-    private String state;
+    private final String state;
     /*
      * String value used to associate a Client session with an ID Token, and to mitigate replay attacks.
      */
-    private String nonce;
+    private final String nonce;
     /*
      * String value: name of the realm used to perform authentication.
      */
-    private String realmName;
+    private final String realmName;

     public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state, String nonce, String realmName) {
         this.authenticationRequestUrl = authorizationEndpointUrl;
@@ -42,16 +41,6 @@ public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUr
         this.realmName = realmName;
     }

-    public OpenIdConnectPrepareAuthenticationResponse(StreamInput in) throws IOException {
-        super(in);
-        authenticationRequestUrl = in.readString();
-        state = in.readString();
-        nonce = in.readString();
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) {
-            realmName = in.readString();
-        }
-    }
-
     public String getAuthenticationRequestUrl() {
         return authenticationRequestUrl;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java
index 5f8755ef0c0da..7db9b26cfaa60 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java
@@ -20,7 +20,7 @@
  */
 public final class GetPrivilegesResponse extends ActionResponse {

-    private ApplicationPrivilegeDescriptor[] privileges;
+    private final ApplicationPrivilegeDescriptor[] privileges;

     public GetPrivilegesResponse(ApplicationPrivilegeDescriptor... privileges) {
         this.privileges = Objects.requireNonNull(privileges, "Application privileges cannot be null");
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java
index 8b8f905e59cbf..9d031d7c9065b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.security.action.profile;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -23,11 +22,6 @@ public ActivateProfileResponse(Profile profile) {
         this.profile = profile;
     }

-    public ActivateProfileResponse(StreamInput in) throws IOException {
-        super(in);
-        this.profile = new Profile(in);
-    }
-
     public Profile getProfile() {
         return profile;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java
index 2dbf6743a5fde..77a411ad477f6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.security.action.profile;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -29,12 +28,6 @@ public GetProfilesResponse(List<Profile> profiles, Map<String, Exception> errors
         this.errors = Objects.requireNonNull(errors);
     }

-    public GetProfilesResponse(StreamInput in) throws IOException {
-        super(in);
-        this.profiles = in.readCollectionAsImmutableList(Profile::new);
-        this.errors = in.readMap(StreamInput::readException);
-    }
-
     public List<Profile> getProfiles() {
         return profiles;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java
index 0574bb4b100a5..6eaeb4f02ac7f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java
@@ -30,13 +30,6 @@ public SuggestProfilesResponse(ProfileHit[] profileHits, long tookInMillis, Tota
         this.totalHits = totalHits;
     }

-    public SuggestProfilesResponse(StreamInput in) throws IOException {
-        super(in);
-        this.profileHits = in.readArray(ProfileHit::new, ProfileHit[]::new);
-        this.tookInMillis = in.readVLong();
-        this.totalHits = Lucene.readTotalHits(in);
-    }
-
     public ProfileHit[] getProfileHits() {
         return profileHits;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java
index 42b672cca6ad8..807c55643b425 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.role;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -20,12 +19,7 @@
  */
 public class PutRoleResponse extends ActionResponse implements ToXContentObject {

-    private boolean created;
-
-    public PutRoleResponse(StreamInput in) throws IOException {
-        super(in);
-        this.created = in.readBoolean();
-    }
+    private final boolean created;

     public PutRoleResponse(boolean created) {
         this.created = created;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java
index 12393213fa740..87e7f3785015f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.rolemapping;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -22,11 +21,6 @@ public class DeleteRoleMappingResponse extends ActionResponse implements ToXCont

     private boolean found = false;

-    public DeleteRoleMappingResponse(StreamInput in) throws IOException {
-        super(in);
-        found = in.readBoolean();
-    }
-
     public DeleteRoleMappingResponse(boolean found) {
         this.found = found;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
index 51689af1d7bc6..13a751829797f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.rolemapping;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
@@ -20,16 +19,7 @@
  */
 public class GetRoleMappingsResponse extends ActionResponse {

-    private ExpressionRoleMapping[] mappings;
-
-    public GetRoleMappingsResponse(StreamInput in) throws IOException {
-        super(in);
-        int size = in.readVInt();
-        mappings = new ExpressionRoleMapping[size];
-        for (int i = 0; i < size; i++) {
-            mappings[i] = new ExpressionRoleMapping(in);
-        }
-    }
+    private final ExpressionRoleMapping[] mappings;

     public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) {
         this.mappings = mappings;
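The constructor deleted from GetRoleMappingsResponse read its array with a manual `readVInt` loop. For reference, `StreamInput` already has a one-call equivalent with the same length-prefixed framing; the removed SuggestProfilesResponse constructor above used it. A sketch, not part of this change:

```java
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;

import java.io.IOException;

class RoleMappingWireFormat {
    // Equivalent to the removed loop: readArray reads a vInt length, then one
    // entry per element using the supplied reader.
    static ExpressionRoleMapping[] readMappings(StreamInput in) throws IOException {
        return in.readArray(ExpressionRoleMapping::new, ExpressionRoleMapping[]::new);
    }
}
```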
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java
index d04b0bbe1195f..5a80736dab66d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.rolemapping;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -21,12 +20,7 @@
  */
 public class PutRoleMappingResponse extends ActionResponse implements ToXContentObject {

-    private boolean created;
-
-    public PutRoleMappingResponse(StreamInput in) throws IOException {
-        super(in);
-        this.created = in.readBoolean();
-    }
+    private final boolean created;

     public PutRoleMappingResponse(boolean created) {
         this.created = created;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java
index 2cb0a76c2d6bf..71b5e93e60a2c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java
@@ -8,7 +8,6 @@

 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
@@ -21,26 +20,12 @@
  */
 public final class SamlAuthenticateResponse extends ActionResponse {

-    private String principal;
-    private String tokenString;
-    private String refreshToken;
-    private String realm;
-    private TimeValue expiresIn;
-    private Authentication authentication;
-
-    public SamlAuthenticateResponse(StreamInput in) throws IOException {
-        super(in);
-        principal = in.readString();
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) {
-            realm = in.readString();
-        }
-        tokenString = in.readString();
-        refreshToken = in.readString();
-        expiresIn = in.readTimeValue();
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) {
-            authentication = new Authentication(in);
-        }
-    }
+    private final String principal;
+    private final String tokenString;
+    private final String refreshToken;
+    private final String realm;
+    private final TimeValue expiresIn;
+    private final Authentication authentication;

     public SamlAuthenticateResponse(Authentication authentication, String tokenString, String refreshToken, TimeValue expiresIn) {
         this.principal = authentication.getEffectiveSubject().getUser().principal();
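The deleted SamlAuthenticateResponse constructor is a good example of the transport-version gating idiom: fields introduced in later releases (`realm` in 8.0, `authentication` in 7.11) were only read when the sending node was new enough. A sketch of the general read/write pair, illustrative only, with the 7.11 constant taken from the constructors removed here:

```java
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

// Reader and writer must apply the same version condition, or the stream
// positions on the two nodes drift apart.
class VersionGatedField {
    static String readMaybe(StreamInput in) throws IOException {
        if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) {
            return in.readString();
        }
        return null; // an older peer never sent the field
    }

    static void writeMaybe(StreamOutput out, String value) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) {
            out.writeString(value);
        }
    }
}
```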
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java
index 097d38c4f886f..42956cb34033d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.saml;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;
@@ -17,16 +16,9 @@
  */
 public final class SamlInvalidateSessionResponse extends ActionResponse {

-    private String realmName;
-    private int count;
-    private String redirectUrl;
-
-    public SamlInvalidateSessionResponse(StreamInput in) throws IOException {
-        super(in);
-        realmName = in.readString();
-        count = in.readInt();
-        redirectUrl = in.readString();
-    }
+    private final String realmName;
+    private final int count;
+    private final String redirectUrl;

     public SamlInvalidateSessionResponse(String realmName, int count, String redirectUrl) {
         this.realmName = realmName;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java
index 0c94e9a372481..8c3e8bd64b9bb 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.saml;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;
@@ -20,12 +19,6 @@ public final class SamlLogoutResponse extends ActionResponse {
     private final String requestId;
     private final String redirectUrl;

-    public SamlLogoutResponse(StreamInput in) throws IOException {
-        super(in);
-        requestId = in.readString();
-        redirectUrl = in.readString();
-    }
-
     public SamlLogoutResponse(String requestId, String redirectUrl) {
         this.requestId = requestId;
         this.redirectUrl = redirectUrl;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java
index 19f50266e5a51..9c7539361837e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.saml;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;
@@ -17,14 +16,9 @@
  */
 public final class SamlPrepareAuthenticationResponse extends ActionResponse {

-    private String realmName;
-    private String requestId;
-    private String redirectUrl;
-
-    public SamlPrepareAuthenticationResponse(StreamInput in) throws IOException {
-        super(in);
-        redirectUrl = in.readString();
-    }
+    private final String realmName;
+    private final String requestId;
+    private final String redirectUrl;

     public SamlPrepareAuthenticationResponse(String realmName, String requestId, String redirectUrl) {
         this.realmName = realmName;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java
index f0cce0ef5e675..b47c47d4d83d4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.security.action.saml;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;
@@ -21,12 +20,7 @@ public String getXMLString() {
         return XMLString;
     }

-    private String XMLString;
-
-    public SamlSpMetadataResponse(StreamInput in) throws IOException {
-        super(in);
-        XMLString = in.readString();
-    }
+    private final String XMLString;

     public SamlSpMetadataResponse(String XMLString) {
         this.XMLString = XMLString;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java
index 5443deac03bd9..c8b7a1ea04e36 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java
@@ -18,7 +18,7 @@

 public class DeleteServiceAccountTokenResponse extends ActionResponse implements ToXContentObject {

-    private boolean found;
+    private final boolean found;

     public DeleteServiceAccountTokenResponse(boolean found) {
         this.found = found;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java
index 73719c7cae489..30522e3389a8a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java
@@ -25,15 +25,13 @@
  */
 public final class CreateTokenResponse extends ActionResponse implements ToXContentObject {

-    private String tokenString;
-    private TimeValue expiresIn;
-    private String scope;
-    private String refreshToken;
-    private String kerberosAuthenticationResponseToken;
+    private final String tokenString;
+    private final TimeValue expiresIn;
+    private final String scope;
+    private final String refreshToken;
+    private final String kerberosAuthenticationResponseToken;
     private Authentication authentication;

-    CreateTokenResponse() {}
-
     public CreateTokenResponse(StreamInput in) throws IOException {
         super(in);
         tokenString = in.readString();
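Note the contrast with CreateTokenResponse just above: it keeps its `StreamInput` constructor (only the unused no-arg constructor is deleted), which suggests it is still read from the wire. For a response like that, the usual invariant applies: the reading constructor must consume fields in exactly the order `writeTo` produces them. A hypothetical minimal round-trip response, for illustration only:

```java
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

public final class RoundTripResponse extends ActionResponse {

    private final String token; // final works on the read path too

    public RoundTripResponse(String token) {
        this.token = token;
    }

    public RoundTripResponse(StreamInput in) throws IOException {
        this.token = in.readString(); // mirror of writeTo, field by field
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(token);
    }
}
```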
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java
index 4b07a3db7a038..ec34d54b0d56e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.security.action.user;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -20,12 +19,7 @@
  */
 public class DeleteUserResponse extends ActionResponse implements ToXContentObject {

-    private boolean found;
-
-    public DeleteUserResponse(StreamInput in) throws IOException {
-        super(in);
-        found = in.readBoolean();
-    }
+    private final boolean found;

     public DeleteUserResponse(boolean found) {
         this.found = found;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java
index 6395d2a090afa..c7f51b21f4920 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java
@@ -8,13 +8,11 @@

 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
-import org.elasticsearch.xpack.core.security.user.InternalUser;
 import org.elasticsearch.xpack.core.security.user.User;

 import java.io.IOException;
@@ -30,30 +28,6 @@ public class GetUsersResponse extends ActionResponse implements ToXContentObject
     @Nullable
     private final Map<String, String> profileUidLookup;

-    public GetUsersResponse(StreamInput in) throws IOException {
-        super(in);
-        int size = in.readVInt();
-        if (size < 0) {
-            users = null;
-        } else {
-            users = new User[size];
-            for (int i = 0; i < size; i++) {
-                final User user = Authentication.AuthenticationSerializationHelper.readUserFrom(in);
-                assert false == user instanceof InternalUser : "should not get internal user [" + user + "]";
-                users[i] = user;
-            }
-        }
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) {
-            if (in.readBoolean()) {
-                profileUidLookup = in.readMap(StreamInput::readString);
-            } else {
-                profileUidLookup = null;
-            }
-        } else {
-            profileUidLookup = null;
-        }
-    }
-
     public GetUsersResponse(Collection<User> users) {
         this(users, null);
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java
index e59f588ffd65c..6c83d4b38ff89 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java
@@ -27,11 +27,11 @@
  * Response for a {@link HasPrivilegesRequest}
  */
 public class HasPrivilegesResponse extends ActionResponse implements ToXContentObject {
-    private String username;
-    private boolean completeMatch;
-    private Map<String, Boolean> cluster;
-    private Set<ResourcePrivileges> index;
-    private Map<String, Set<ResourcePrivileges>> application;
+    private final String username;
+    private final boolean completeMatch;
+    private final Map<String, Boolean> cluster;
+    private final Set<ResourcePrivileges> index;
+    private final Map<String, Set<ResourcePrivileges>> application;

     public HasPrivilegesResponse() {
         this("", true, Collections.emptyMap(), Collections.emptyList(), Collections.emptyMap());
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java
index 8e8ff50e5b4ac..9977ad459b8fd 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java
@@ -21,7 +21,7 @@

 public class ProfileHasPrivilegesResponse extends ActionResponse implements ToXContentObject {

-    private Set<String> hasPrivilegeUids;
+    private final Set<String> hasPrivilegeUids;
     private final Map<String, Exception> errors;

     public ProfileHasPrivilegesResponse(StreamInput in) throws IOException {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java
index 86a25f8321176..fb6e699cd34c2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.core.security.action.user;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -21,12 +20,7 @@
  */
 public class PutUserResponse extends ActionResponse implements ToXContentObject {

-    private boolean created;
-
-    public PutUserResponse(StreamInput in) throws IOException {
-        super(in);
-        this.created = in.readBoolean();
-    }
+    private final boolean created;

     public PutUserResponse(boolean created) {
         this.created = created;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java
index cbb747272eebc..32d9725a909c6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collection;

 /**
@@ -52,16 +51,7 @@ public ActionRequestValidationException validate() {

     public static class Response extends ActionResponse implements ToXContentObject {

-        private Collection<CertificateInfo> certificates;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            this.certificates = new ArrayList<>();
-            int count = in.readVInt();
-            for (int i = 0; i < count; i++) {
-                certificates.add(new CertificateInfo(in));
-            }
-        }
+        private final Collection<CertificateInfo> certificates;

         public Response(Collection<CertificateInfo> certificates) {
             this.certificates = certificates;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java
index 97f4b7d619191..60d20046c4f29 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.watcher.transport.actions.ack;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;
@@ -20,12 +19,7 @@
  */
 public class AckWatchResponse extends ActionResponse {

-    private WatchStatus status;
-
-    public AckWatchResponse(StreamInput in) throws IOException {
-        super(in);
-        status = in.readBoolean() ? new WatchStatus(in) : null;
-    }
+    private final WatchStatus status;

     public AckWatchResponse(@Nullable WatchStatus status) {
         this.status = status;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java
index 8091ba3b5ca26..8b0717c9855ec 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.core.watcher.transport.actions.activate;

 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;
@@ -20,12 +19,7 @@
  */
 public class ActivateWatchResponse extends ActionResponse {

-    private WatchStatus status;
-
-    public ActivateWatchResponse(StreamInput in) throws IOException {
-        super(in);
-        status = in.readBoolean() ? new WatchStatus(in) : null;
-    }
+    private final WatchStatus status;

     public ActivateWatchResponse(@Nullable WatchStatus status) {
         this.status = status;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java
index 261a31211e497..cdb4503945904 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java
@@ -8,7 +8,6 @@

 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -26,12 +25,6 @@ public class ExecuteWatchResponse extends ActionResponse implements ToXContentOb
     private final String recordId;
     private final XContentSource recordSource;

-    public ExecuteWatchResponse(StreamInput in) throws IOException {
-        super(in);
-        recordId = in.readString();
-        recordSource = XContentSource.readFrom(in);
-    }
-
     public ExecuteWatchResponse(String recordId, BytesReference recordSource, XContentType contentType) {
         this.recordId = recordId;
         this.recordSource = new XContentSource(recordSource, contentType);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java
index d1da1cc490f4b..789925f3832ac 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java
@@ -8,7 +8,6 @@

 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.index.seqno.SequenceNumbers;
@@ -22,32 +21,13 @@

 public class GetWatchResponse extends ActionResponse implements ToXContentObject {

-    private String id;
-    private WatchStatus status;
-    private boolean found;
-    private XContentSource source;
-    private long version;
-    private long seqNo;
-    private long primaryTerm;
-
-    public GetWatchResponse(StreamInput in) throws IOException {
-        super(in);
-        id = in.readString();
-        found = in.readBoolean();
-        if (found) {
-            status = new WatchStatus(in);
-            source = XContentSource.readFrom(in);
-            version = in.readZLong();
-            seqNo = in.readZLong();
-            primaryTerm = in.readVLong();
-        } else {
-            status = null;
-            source = null;
-            version = Versions.NOT_FOUND;
-            seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
-            primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
-        }
-    }
+    private final String id;
+    private final WatchStatus status;
+    private final boolean found;
+    private final XContentSource source;
+    private final long version;
+    private final long seqNo;
+    private final long primaryTerm;

     /**
      * ctor for missing watch
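The removed watcher constructors all hand-rolled the nullable-field framing: a boolean flag, then the payload when present. `StreamInput`/`StreamOutput` expose the same framing as `readOptionalWriteable`/`writeOptionalWriteable`; a sketch of how the deleted code could equivalently have been written (not what this PR does):

```java
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;

import java.io.IOException;

class OptionalStatusWireFormat {
    // Same bytes as: in.readBoolean() ? new WatchStatus(in) : null
    @Nullable
    static WatchStatus read(StreamInput in) throws IOException {
        return in.readOptionalWriteable(WatchStatus::new);
    }

    // Same bytes as: writeBoolean(status != null) followed by status.writeTo(out)
    static void write(StreamOutput out, @Nullable WatchStatus status) throws IOException {
        out.writeOptionalWriteable(status);
    }
}
```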
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java
index b755d3497f649..b05f7065ff63c 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java
@@ -9,7 +9,6 @@

 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
-import org.elasticsearch.action.admin.indices.shrink.ShrinkAction;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
 import org.elasticsearch.action.delete.TransportDeleteAction;
 import org.elasticsearch.action.index.TransportIndexAction;
@@ -68,7 +67,6 @@ public void testFindPrivilegesThatGrant() {
             equalTo(List.of("monitor", "cross_cluster_replication", "manage", "all"))
         );
         assertThat(findPrivilegesThatGrant(RefreshAction.NAME), equalTo(List.of("maintenance", "manage", "all")));
-        assertThat(findPrivilegesThatGrant(ShrinkAction.NAME), equalTo(List.of("manage", "all")));
     }

     public void testPrivilegesForRollupFieldCapsAction() {
diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
index 3c4be50b25a73..d23f1e4b89a8c 100644
--- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
+++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
@@ -553,7 +553,10 @@ public void onFailure(Exception e) {
                 fail("downsample index has not been created");
             }
         });
-        downsample(sourceIndex, downsampleIndex, config);
+
+        // Downsample with retries, in case the downsampled index is not ready.
+        assertBusy(() -> downsample(sourceIndex, downsampleIndex, config), 120, TimeUnit.SECONDS);
+
         // We must wait until the in-progress downsample ends, otherwise data will not be cleaned up
         assertBusy(() -> assertTrue("In progress downsample did not complete", downsampleListener.success), 60, TimeUnit.SECONDS);
     }
@@ -1186,7 +1189,7 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D
                         measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000
                     );
                     assertEquals(1, measurement.attributes().size());
-                    assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration")));
+                    assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration", "failed")));
                 }
             }, 10, TimeUnit.SECONDS);
     }
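`assertBusy`, used for the new retry above, comes from ESTestCase: it repeatedly runs the given block (with a growing backoff between attempts) until it stops throwing, and only fails once the timeout elapses. Wrapping `downsample(...)` in it is what makes the call retryable. A self-contained sketch of the semantics:

```java
import org.elasticsearch.test.ESTestCase;

import java.util.concurrent.TimeUnit;

// Illustrative test, not part of this PR: the lambda throws for the first
// second, then succeeds; assertBusy keeps retrying until it does.
public class AssertBusyExampleTests extends ESTestCase {
    public void testEventuallySucceeds() throws Exception {
        long start = System.nanoTime();
        assertBusy(() -> {
            if (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(1)) {
                throw new AssertionError("not ready yet"); // triggers another attempt
            }
        }, 120, TimeUnit.SECONDS); // same timeout shape as the downsample retry
    }
}
```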
"true" : "", + ] +} + +tasks.named('stringTemplates').configure { + var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") + var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") + var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + // enrich + File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st") + template { + it.properties = intProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 43181a344e268..f365a2ed78610 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(BooleanBlock block); - @Override BooleanBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 09c436e805d57..32627a0e0d36b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -71,55 +71,6 @@ public BooleanBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
index 43181a344e268..f365a2ed78610 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java
@@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(BooleanBlock block);
-
         @Override
         BooleanBlock build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
index 09c436e805d57..32627a0e0d36b 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java
@@ -71,55 +71,6 @@ public BooleanBlockBuilder endPositionEntry() {
         return this;
     }

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public BooleanBlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition((BooleanBlock) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public BooleanBlockBuilder appendAllValuesToCurrentPosition(BooleanBlock block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-        final BooleanVector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-                appendBoolean(vector.getBoolean(p));
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-                    appendBoolean(block.getBoolean(i++));
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public BooleanBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java
index 5f5e1f9caa488..a6c75dbc1122f 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java
@@ -229,19 +229,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder perm
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(BytesRefBlock block);
-
         @Override
         BytesRefBlock build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java
index aed422b0c0104..4ef7ed4084228 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java
@@ -78,56 +78,6 @@ protected void writeNullValue() {
         values.append(BytesRefBlock.NULL_VALUE);
     }

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public BytesRefBlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition((BytesRefBlock) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public BytesRefBlockBuilder appendAllValuesToCurrentPosition(BytesRefBlock block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-        BytesRef scratch = new BytesRef();
-        final BytesRefVector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-                appendBytesRef(vector.getBytesRef(p, scratch));
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-                    appendBytesRef(block.getBytesRef(i++, scratch));
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public BytesRefBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java
index 27d70caaa18fe..a682c2cba019e 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java
@@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permit
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(DoubleBlock block);
-
         @Override
         DoubleBlock build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java
index 427127784869a..5921c2daa9f92 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java
@@ -71,55 +71,6 @@ public DoubleBlockBuilder endPositionEntry() {
         return this;
     }

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public DoubleBlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition((DoubleBlock) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public DoubleBlockBuilder appendAllValuesToCurrentPosition(DoubleBlock block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-        final DoubleVector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-                appendDouble(vector.getDouble(p));
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-                    appendDouble(block.getDouble(i++));
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public DoubleBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java
index a34d50bf6ff55..e9d606b51c6a1 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java
@@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits I
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(IntBlock block);
-
         @Override
         IntBlock build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java
index aaf46798fd789..85f943004de29 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java
@@ -71,55 +71,6 @@ public IntBlockBuilder endPositionEntry() {
         return this;
     }

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public IntBlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition((IntBlock) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public IntBlockBuilder appendAllValuesToCurrentPosition(IntBlock block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-        final IntVector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-                appendInt(vector.getInt(p));
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-                    appendInt(block.getInt(i++));
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public IntBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
index 21c3eb4257b8d..3e1c5fcfaac95 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
@@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(LongBlock block);
-
         @Override
         LongBlock build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java
index 5d8daf306809d..d24ae214da63a 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java
@@ -71,55 +71,6 @@ public LongBlockBuilder endPositionEntry() {
         return this;
     }

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public LongBlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition((LongBlock) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public LongBlockBuilder appendAllValuesToCurrentPosition(LongBlock block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-        final LongVector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-                appendLong(vector.getLong(p));
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-                    appendLong(block.getLong(i++));
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public LongBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
index 7fc92da1943ac..0e34eaa68881f 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
@@ -183,12 +183,6 @@ interface Builder extends BlockLoader.Builder, Releasable {
          */
         Builder endPositionEntry();

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition(Block block);
-
         /**
          * Copy the values in {@code block} from {@code beginInclusive} to
          * {@code endExclusive} into this builder.
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java
index c2ac99a7c8489..3df75f4bc1c56 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java
@@ -172,11 +172,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) {
             return this;
         }

-        @Override
-        public Block.Builder appendAllValuesToCurrentPosition(Block block) {
-            return appendNull();
-        }
-
         @Override
         public Block.Builder mvOrdering(MvOrdering mvOrdering) {
             /*
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java
index 8d3497a66a2d7..2751cd31fd362 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java
@@ -149,11 +149,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) {
             return this;
         }

-        @Override
-        public Block.Builder appendAllValuesToCurrentPosition(Block block) {
-            throw new UnsupportedOperationException("DocBlock doesn't support appendBlockAndMerge");
-        }
-
         @Override
         public Block.Builder mvOrdering(MvOrdering mvOrdering) {
             /*
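With `appendAllValuesToCurrentPosition` gone from `Block.Builder` and every implementation, a caller that still wants to collapse a whole block into one multivalued position can compose the same behavior from the surviving primitives (`beginPositionEntry`/`append*`/`endPositionEntry`). A sketch for the int case, assuming the standard compute `BlockFactory` API; this helper is not part of the PR:

```java
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;

class FlattenToOnePosition {
    // Emit every value of the input block as one multivalued position.
    static IntBlock flatten(BlockFactory blockFactory, IntBlock block) {
        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getTotalValueCount())) {
            if (block.getTotalValueCount() == 0) {
                builder.appendNull(); // nothing to emit, mirror the removed code's null handling
            } else {
                builder.beginPositionEntry();
                for (int p = 0; p < block.getPositionCount(); p++) {
                    int start = block.getFirstValueIndex(p);
                    int end = start + block.getValueCount(p);
                    for (int i = start; i < end; i++) {
                        builder.appendInt(block.getInt(i));
                    }
                }
                builder.endPositionEntry();
            }
            return builder.build();
        }
    }
}
```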
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java
index 9893ea1826945..2404217d11f95 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java
@@ -84,6 +84,10 @@ public boolean singleSegmentNonDecreasing() {
         return singleSegmentNonDecreasing;
     }

+    public boolean singleSegment() {
+        return shards.isConstant() && segments.isConstant();
+    }
+
     private boolean checkIfSingleSegmentNonDecreasing() {
         if (getPositionCount() < 2) {
             return true;
@@ -138,35 +142,57 @@ private void buildShardSegmentDocMapIfMissing() {
         for (int p = 0; p < forwards.length; p++) {
             forwards[p] = p;
         }
-        new IntroSorter() {
-            int pivot;
-
-            @Override
-            protected void setPivot(int i) {
-                pivot = finalForwards[i];
-            }
-
-            @Override
-            protected int comparePivot(int j) {
-                int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j]));
-                if (cmp != 0) {
-                    return cmp;
+        if (singleSegment()) {
+            new IntroSorter() {
+                int pivot;
+
+                @Override
+                protected void setPivot(int i) {
+                    pivot = finalForwards[i];
+                }
+
+                @Override
+                protected int comparePivot(int j) {
+                    return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j]));
                 }
-                cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j]));
-                if (cmp != 0) {
-                    return cmp;
+
+                @Override
+                protected void swap(int i, int j) {
+                    int tmp = finalForwards[i];
+                    finalForwards[i] = finalForwards[j];
+                    finalForwards[j] = tmp;
+                }
+            }.sort(0, forwards.length);
+        } else {
+            new IntroSorter() {
+                int pivot;
+
+                @Override
+                protected void setPivot(int i) {
+                    pivot = finalForwards[i];
                 }
-                return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j]));
-            }

-            @Override
-            protected void swap(int i, int j) {
-                int tmp = finalForwards[i];
-                finalForwards[i] = finalForwards[j];
-                finalForwards[j] = tmp;
-            }
-        }.sort(0, forwards.length);
+                @Override
+                protected int comparePivot(int j) {
+                    int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j]));
+                    if (cmp != 0) {
+                        return cmp;
+                    }
+                    cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j]));
+                    if (cmp != 0) {
+                        return cmp;
+                    }
+                    return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j]));
+                }

+                @Override
+                protected void swap(int i, int j) {
+                    int tmp = finalForwards[i];
+                    finalForwards[i] = finalForwards[j];
+                    finalForwards[j] = tmp;
+                }
+            }.sort(0, forwards.length);
+        }
         backwards = new int[forwards.length];
         for (int p = 0; p < forwards.length; p++) {
             backwards[forwards[p]] = p;
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java
index 8616d7a7e1bc6..fd9dd6a479298 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java
@@ -165,11 +165,6 @@ public void close() {
         blockFactory.adjustBreaker(-ordsSize(ords.length));
     }

-    @Override
-    public Block.Builder appendAllValuesToCurrentPosition(Block block) {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) {
         throw new UnsupportedOperationException();
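The new branch in `buildShardSegmentDocMapIfMissing` leans on `IntVector.isConstant()`: when both the shards and segments vectors are constant, every row comes from one leaf, so the three-way (shard, segment, doc) comparison degenerates to a plain doc-id comparison. A standalone illustration of that specialized sorter over a bare `int[]`, mirroring the structure of the code above:

```java
import org.apache.lucene.util.IntroSorter;

class SingleSegmentDocSort {
    // Build the permutation that sorts `docs` ascending, without moving `docs`
    // itself, just like the forwards map in DocVector.
    static int[] forwards(int[] docs) {
        int[] forwards = new int[docs.length];
        for (int p = 0; p < forwards.length; p++) {
            forwards[p] = p;
        }
        new IntroSorter() {
            int pivot;

            @Override
            protected void setPivot(int i) {
                pivot = forwards[i];
            }

            @Override
            protected int comparePivot(int j) {
                // single-segment case: doc id is the whole sort key
                return Integer.compare(docs[pivot], docs[forwards[j]]);
            }

            @Override
            protected void swap(int i, int j) {
                int tmp = forwards[i];
                forwards[i] = forwards[j];
                forwards[j] = tmp;
            }
        }.sort(0, forwards.length);
        return forwards;
    }
}
```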
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st
index 3850e3da7c796..331a5713fa3d1 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st
@@ -277,19 +277,6 @@
 $endif$
         @Override
         Builder mvOrdering(Block.MvOrdering mvOrdering);

-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        @Override
-        Builder appendAllValuesToCurrentPosition(Block block);
-
-        /**
-         * Appends the all values of the given block into a the current position
-         * in this builder.
-         */
-        Builder appendAllValuesToCurrentPosition($Type$Block block);
-
         @Override
         $Type$Block build();
     }
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
index 5b432f1c62968..fab3be0be4233 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st
@@ -113,66 +113,6 @@
 $if(BytesRef)$
     }
 $endif$

-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public $Type$BlockBuilder appendAllValuesToCurrentPosition(Block block) {
-        if (block.areAllValuesNull()) {
-            return appendNull();
-        }
-        return appendAllValuesToCurrentPosition(($Type$Block) block);
-    }
-
-    /**
-     * Appends the all values of the given block into a the current position
-     * in this builder.
-     */
-    @Override
-    public $Type$BlockBuilder appendAllValuesToCurrentPosition($Type$Block block) {
-        final int positionCount = block.getPositionCount();
-        if (positionCount == 0) {
-            return appendNull();
-        }
-        final int totalValueCount = block.getTotalValueCount();
-        if (totalValueCount == 0) {
-            return appendNull();
-        }
-        if (totalValueCount > 1) {
-            beginPositionEntry();
-        }
-$if(BytesRef)$
-        BytesRef scratch = new BytesRef();
-$endif$
-        final $Type$Vector vector = block.asVector();
-        if (vector != null) {
-            for (int p = 0; p < positionCount; p++) {
-$if(BytesRef)$
-                appendBytesRef(vector.getBytesRef(p, scratch));
-$else$
-                append$Type$(vector.get$Type$(p));
-$endif$
-            }
-        } else {
-            for (int p = 0; p < positionCount; p++) {
-                int count = block.getValueCount(p);
-                int i = block.getFirstValueIndex(p);
-                for (int v = 0; v < count; v++) {
-$if(BytesRef)$
-                    appendBytesRef(block.getBytesRef(i++, scratch));
-$else$
-                    append$Type$(block.get$Type$(i++));
-$endif$
-                }
-            }
-        }
-        if (totalValueCount > 1) {
-            endPositionEntry();
-        }
-        return this;
-    }
-
     @Override
     public $Type$BlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
         if (block.areAllValuesNull()) {
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java
index 08be21f95786f..eab2a314b2074 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java
@@ -137,7 +137,22 @@ protected Page process(Page page) {
         boolean success = false;
         try {
             if (docVector.singleSegmentNonDecreasing()) {
-                loadFromSingleLeaf(blocks, docVector);
+                IntVector docs = docVector.docs();
+                int shard = docVector.shards().getInt(0);
+                int segment = docVector.segments().getInt(0);
+                loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() {
+                    @Override
+                    public int count() {
+                        return docs.getPositionCount();
+                    }
+
+                    @Override
+                    public int get(int i) {
+                        return docs.getInt(i);
+                    }
+                });
+            } else if (docVector.singleSegment()) {
+                loadFromSingleLeafUnsorted(blocks, docVector);
             } else {
                 try (LoadFromMany many = new LoadFromMany(blocks, docVector)) {
                     many.run();
@@ -200,38 +215,24 @@ private boolean positionFieldWorkDocGuarteedAscending(int shard, int segment) {
         return true;
     }

-    private void loadFromSingleLeaf(Block[] blocks, DocVector docVector) throws IOException {
-        int shard = docVector.shards().getInt(0);
-        int segment = docVector.segments().getInt(0);
-        int firstDoc = docVector.docs().getInt(0);
+    private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoader.Docs docs) throws IOException {
+        int firstDoc = docs.get(0);
         positionFieldWork(shard, segment, firstDoc);
-        IntVector docs = docVector.docs();
-        BlockLoader.Docs loaderDocs = new BlockLoader.Docs() {
-            @Override
-            public int count() {
-                return docs.getPositionCount();
-            }
-
-            @Override
-            public int get(int i) {
-                return docs.getInt(i);
-            }
-        };
         StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS;
         List<RowStrideReaderWork> rowStrideReaders = new ArrayList<>(fields.length);
-        ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.getPositionCount());
+        ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.count());
         LeafReaderContext ctx = ctx(shard, segment);
         try {
             for (int f = 0; f < fields.length; f++) {
                 FieldWork field = fields[f];
                 BlockLoader.ColumnAtATimeReader columnAtATime = field.columnAtATime(ctx);
                 if (columnAtATime != null) {
-                    blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, loaderDocs);
+                    blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, docs);
                 } else {
                     rowStrideReaders.add(
                         new RowStrideReaderWork(
                             field.rowStride(ctx),
-                            (Block.Builder) field.loader.builder(loaderBlockFactory, docs.getPositionCount()),
+                            (Block.Builder) field.loader.builder(loaderBlockFactory, docs.count()),
                             f
                         )
                     );
@@ -248,7 +249,7 @@ public int get(int i) {
                 );
             }
             StoredFieldLoader storedFieldLoader;
-            if (useSequentialStoredFieldsReader(docVector.docs())) {
+            if (useSequentialStoredFieldsReader(docs)) {
                 storedFieldLoader = StoredFieldLoader.fromSpecSequential(storedFieldsSpec);
                 trackStoredFields(storedFieldsSpec, true);
             } else {
@@ -259,8 +260,8 @@ public int get(int i) {
                 storedFieldLoader.getLoader(ctx, null),
                 storedFieldsSpec.requiresSource() ? shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null
             );
-            for (int p = 0; p < docs.getPositionCount(); p++) {
-                int doc = docs.getInt(p);
+            for (int p = 0; p < docs.count(); p++) {
+                int doc = docs.get(p);
                 if (storedFields != null) {
                     storedFields.advanceTo(doc);
                 }
@@ -278,6 +279,30 @@ public int get(int i) {
         }
     }

+    private void loadFromSingleLeafUnsorted(Block[] blocks, DocVector docVector) throws IOException {
+        IntVector docs = docVector.docs();
+        int[] forwards = docVector.shardSegmentDocMapForwards();
+        int shard = docVector.shards().getInt(0);
+        int segment = docVector.segments().getInt(0);
+        loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() {
+            @Override
+            public int count() {
+                return docs.getPositionCount();
+            }
+
+            @Override
+            public int get(int i) {
+                return docs.getInt(forwards[i]);
+            }
+        });
+        final int[] backwards = docVector.shardSegmentDocMapBackwards();
+        for (int i = 0; i < blocks.length; i++) {
+            Block in = blocks[i];
+            blocks[i] = in.filter(backwards);
+            in.close();
+        }
+    }
+
     private class LoadFromMany implements Releasable {
         private final Block[] target;
         private final IntVector shards;
@@ -371,9 +396,9 @@ public void close() {
      * Is it more efficient to use a sequential stored field reader
      * when reading stored fields for the documents contained in {@code docIds}?
      */
-    private boolean useSequentialStoredFieldsReader(IntVector docIds) {
-        return docIds.getPositionCount() >= SEQUENTIAL_BOUNDARY
-            && docIds.getInt(docIds.getPositionCount() - 1) - docIds.getInt(0) == docIds.getPositionCount() - 1;
+    private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs) {
+        int count = docs.count();
+        return count >= SEQUENTIAL_BOUNDARY && docs.get(count - 1) - docs.get(0) == count - 1;
     }

     private void trackStoredFields(StoredFieldsSpec spec, boolean sequential) {
shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null ); - for (int p = 0; p < docs.getPositionCount(); p++) { - int doc = docs.getInt(p); + for (int p = 0; p < docs.count(); p++) { + int doc = docs.get(p); if (storedFields != null) { storedFields.advanceTo(doc); } @@ -278,6 +279,30 @@ public int get(int i) { } } + private void loadFromSingleLeafUnsorted(Block[] blocks, DocVector docVector) throws IOException { + IntVector docs = docVector.docs(); + int[] forwards = docVector.shardSegmentDocMapForwards(); + int shard = docVector.shards().getInt(0); + int segment = docVector.segments().getInt(0); + loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(forwards[i]); + } + }); + final int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int i = 0; i < blocks.length; i++) { + Block in = blocks[i]; + blocks[i] = in.filter(backwards); + in.close(); + } + } + private class LoadFromMany implements Releasable { private final Block[] target; private final IntVector shards; @@ -371,9 +396,9 @@ public void close() { * Is it more efficient to use a sequential stored field reader * when reading stored fields for the documents contained in {@code docIds}? */ - private boolean useSequentialStoredFieldsReader(IntVector docIds) { - return docIds.getPositionCount() >= SEQUENTIAL_BOUNDARY - && docIds.getInt(docIds.getPositionCount() - 1) - docIds.getInt(0) == docIds.getPositionCount() - 1; + private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs) { + int count = docs.count(); + return count >= SEQUENTIAL_BOUNDARY && docs.get(count - 1) - docs.get(0) == count - 1; } private void trackStoredFields(StoredFieldsSpec spec, boolean sequential) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java deleted file mode 100644 index 9c1b02aa74107..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.compute.operator.ComputeTestCase; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class BlockBuilderAppendBlockTests extends ComputeTestCase { - - public void testBasic() { - BlockFactory blockFactory = blockFactory(); - IntBlock src = blockFactory.newIntBlockBuilder(10) - .appendInt(1) - .appendNull() - .beginPositionEntry() - .appendInt(4) - .appendInt(6) - .endPositionEntry() - .appendInt(10) - .appendInt(20) - .appendInt(30) - .appendNull() - .beginPositionEntry() - .appendInt(1) - .endPositionEntry() - .build(); - // copy position by position - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - for (int i = 0; i < src.getPositionCount(); i++) { - try (IntBlock filter = src.filter(i)) { - dst.appendAllValuesToCurrentPosition(filter); - } - } - try (IntBlock block = dst.build()) { - assertThat(block, equalTo(src)); - } - } - // copy all block - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - try (Block dst = randomlyDivideAndMerge(src)) { - assertThat(dst.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - - public void testRandomNullBlock() { - BlockFactory blockFactory = blockFactory(); - IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - src.appendInt(101); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - IntBlock block = src.build(); - assertThat(block.getPositionCount(), equalTo(3)); - assertTrue(block.isNull(0)); - assertThat(block.getInt(1), equalTo(101)); - assertTrue(block.isNull(2)); - try (Block flatten = randomlyDivideAndMerge(block)) { - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); - } - } - - public void testRandom() { - ElementType elementType = randomFrom(ElementType.INT, ElementType.BYTES_REF, ElementType.BOOLEAN); - Block block = BasicBlockTests.randomBlock( - elementType, - randomIntBetween(1, 1024), - randomBoolean(), - 0, - between(1, 16), - 0, - between(0, 16) - ).block(); - - block = randomlyDivideAndMerge(block); - block.close(); - } - - private Block randomlyDivideAndMerge(Block block) { - while (block.getPositionCount() > 1 || randomBoolean()) { - int positionCount = block.getPositionCount(); - int offset = 0; - Block.Builder builder = block.elementType() - .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance()); - List expected = new ArrayList<>(); - while (offset < positionCount) { - int length = randomIntBetween(1, positionCount - offset); - int[] positions = new int[length]; - for (int i = 0; i < length; i++) { - positions[i] = offset + i; - } - offset += length; - Block sub = block.filter(positions); - expected.add(extractAndFlattenBlockValues(sub)); - builder.appendAllValuesToCurrentPosition(sub); - sub.close(); - } - block.close(); - block = builder.build(); - assertThat(block.getPositionCount(), 
equalTo(expected.size())); - for (int i = 0; i < block.getPositionCount(); i++) { - assertThat(BlockUtils.toJavaObject(block, i), equalTo(expected.get(i))); - } - } - return block; - } - - static Object extractAndFlattenBlockValues(Block block) { - List values = new ArrayList<>(); - for (int i = 0; i < block.getPositionCount(); i++) { - Object v = BlockUtils.toJavaObject(block, i); - if (v == null) { - continue; - } - if (v instanceof List l) { - values.addAll(l); - } else { - values.add(v); - } - } - if (values.isEmpty()) { - return null; - } else if (values.size() == 1) { - return values.get(0); - } else { - return values; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index a2b074c1403a0..4595b26ca27aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -113,12 +113,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public IntBlock build() { return builder.build(); @@ -174,12 +168,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public LongBlock build() { return builder.build(); @@ -235,12 +223,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public DoubleBlock build() { return builder.build(); @@ -296,12 +278,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BytesRefBlock build() { return builder.build(); @@ -360,12 +336,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BooleanBlock build() { return builder.build(); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index 0f2bf2703f62f..b32a7385d12c5 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -71,7 +71,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx original.getApiCallSection().getNodeSelector() ); - String id = (String) startResponse.evaluate("id"); + 
String id = startResponse.evaluate("id"); boolean finishedEarly = id == null; if (finishedEarly) { /* diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index 752775b20b0e3..e04435b715c99 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -167,7 +167,7 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { public void testMatchField_ImplicitFieldsList() throws IOException { Map result = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number") + new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number | sort number") ); var columns = List.of(Map.of("name", "number", "type", "long")); var values = List.of(List.of(1000), List.of(1000), List.of(5000)); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index b67432f491cf3..5aa48234cb11a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -322,7 +322,7 @@ public void testNullInAggs() throws IOException { matchesMap().entry("values", List.of(List.of(1))).entry("columns", List.of(Map.of("name", "min(value)", "type", "long"))) ); - builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group"); + builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`"); result = runEsql(builder); assertMap( result, @@ -478,7 +478,8 @@ public void testWarningHeadersOnFailedConversions() throws IOException { bulkLoadTestData(count); Request request = prepareRequest(SYNC); - var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; + var query = fromIndex() + + " | sort integer asc | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); @@ -493,7 +494,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { int expectedWarnings = Math.min(count / 2, 20); var warnings = response.getWarnings(); assertThat(warnings.size(), is(1 + expectedWarnings)); - var firstHeader = "Line 1:36: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + var firstHeader = "Line 1:55: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + "treating result as null. 
Only first 20 failures recorded."; assertThat(warnings.get(0), containsString(firstHeader)); for (int i = 1; i <= expectedWarnings; i++) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md new file mode 100644 index 0000000000000..fdd52c6aac229 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md @@ -0,0 +1,178 @@
+# ESQL's CSV-SPEC Integration Tests
+
+ESQL has lots of different kinds of integration tests! Like the rest of
+Elasticsearch it has YAML tests and Java Rest tests and ESIntegTestCase
+subclasses, but it *also* has CSV-SPEC tests. You can think of them like
+the YAML tests, but they can *only* call _query and assert on the response.
+That simplicity lets us run them in lots of contexts and keeps them *fast*.
+As such, most of ESQL's integration tests are CSV-SPEC tests.
+
+## Running
+
+CSV-SPEC tests run in lots of different ways. The simplest way to run a
+CSV-SPEC test is to open ESQL's CsvTests.java and run it right in IntelliJ using
+the unit runner. As of this writing that runs 1,350 tests in about 35 seconds.
+It's fast because it doesn't stand up an Elasticsearch node at all. It runs
+like a big unit test.
+
+The second-simplest way to run the CSV-SPEC tests is to run `EsqlSpecIT` in
+`:x-pack:plugin:esql:qa:server:single-node` via the Gradle runner in IntelliJ
+or on the command line. That will boot a real Elasticsearch node, create some
+test data, and run the tests. The tests are reused in a few more scenarios,
+including multi-node and mixed-cluster.
+
+## Organization
+
+The CSV-SPEC tests grew organically for a long time, but we've since settled
+on general organizing principles. Lots of tests still don't follow those
+principles. See: organic growth. Anyway!
+
+### Files named after types
+
+Basic support for a type, like, say, `integer` or `geo_point`, will live in a
+file named after the type.
+
+* `boolean`
+* `date`
+* `floats` (`double`)
+* `ints` (`integer` and `long`)
+* `ip`
+* `null`
+* `unsigned_long`
+* `version`
+
+Many functions can take lots of different types as input, like `TO_STRING`
+and `VALUES`. Those tests also live in these files.
+
+### Themed functions
+
+Some files are named after groups of functions and contain, unsurprisingly,
+the tests for those functions:
+
+* `comparison`
+* `conditional`
+* `math`
+
+### Files named after operations
+
+Lots of operations in the ESQL language have files named after them, containing
+the integration tests for the syntax and options of that operation. Operations
+will appear in many of the other files too, especially `FROM`, `WHERE`,
+`LIMIT`, and `EVAL`, but there only to test particular functions.
+
+* `dissect`
+* `drop`
+* `enrich`
+* `eval`
+* `grok`
+* `order`
+* `keep`
+* `limit`
+* `meta`
+* `mv_expand`
+* `rename`
+* `row`
+* `stats`
+* `topN`
+* `where`
+* `where-like`
+
+### Deprecated files
+
+When we first implemented copying snippets into the documentation I dumped all
+the snippets into `docs.csv-spec`. This was supposed to be a temporary holding
+area until they were relocated, and we haven't had time to do that. Don't put
+more tests in there.
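+
+### What a test looks like
+
+A spec test is a name line, an ES|QL query terminated by `;`, and then the
+expected result table, also terminated by `;`. This is a hypothetical sketch
+(not a real test in the suite) just to show the shape:
+
+```csv-spec
+basicEvalSketch
+ROW a = 1
+| EVAL b = a + 1
+;
+
+a:integer | b:integer
+1 | 2
+;
+```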
+ +## Embedding examples in the documentation + +Snippets from these tests can be embedded into the asciidoc documentation of +ESQL using the following rather arcane snippet: + +```asciidoc +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sin-result] +|=== +``` +
+
+What is this asciidoc syntax?
+
+The first section is a source code block for the ES|QL query:
+
+- a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`)
+  - `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL
+- an [include directive](https://docs.asciidoctor.org/asciidoc/latest/directives/include/) to import content from another file (i.e. the test files here) into the current document
+- a directory path defined as an [attribute](https://docs.asciidoctor.org/asciidoc/latest/attributes/document-attributes/) or variable, within curly braces: `{esql-specs}`
+- a [tagged region](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions) `[tag=sin]` to include only a specific section of the file
+
+The second section is the response returned as a table:
+
+- styled using `[%header.monospaced.styled,format=dsv,separator=|]`
+- delimited by `|===`
+- again using includes, attributes, and tagged regions
+
+The example above extracts the `sin` test from the `floats` file. If you are
+writing the tests for a function, don't build this by hand; instead, annotate
+the `.java` file for the function with `@FunctionInfo` and add an `examples`
+field like this:
+
+```java
+@FunctionInfo(
+    returnType = "double",
+    description = "Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.",
+    examples = @Example(file = "floats", tag = "sin")
+)
+```
+
+Running the tests will generate the asciidoc files for you. See
+`esql/functions/README.md` for all of the docs the tests generate.
+
+Either way, CSV-SPEC files must be tagged using four special comments so snippets can be
+included in the docs:
+
+```csv-spec
+sin
+// tag::sin[]
+ROW a=1.8
+| EVAL sin=SIN(a)
+// end::sin[]
+;
+
+// tag::sin-result[]
+a:double | sin:double
+ 1.8 | 0.9738476308781951
+// end::sin-result[]
+;
+```
+
+The `// tag::` and `// end::` comments are standard asciidoc syntax for working with [tagged regions](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions). Weird looking, but
+you aren't going to type it by accident!
+
+Finally, this'll appear in the docs as a table kind of like this:
+
+| a:double | sin:double |
+|---------:|-------------------:|
+| 1.8 | 0.9738476308781951 |
+
+### Skipping tests in old versions
+
+CSV-SPEC tests run against half-upgraded clusters in the
+`x-pack:plugin:esql:qa:server:mixed-cluster` project and will fail if they test
+new behavior against an old node. To stop them from running, create a
+`NodeFeature` in `EsqlFeatures` for your change. Then you can skip the test by
+adding a `required_feature` to it like so:
+
+```csv-spec
+mvSlice
+required_feature: esql.mv_sort
+
+row a = [true, false, false, true]
+| eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3);
+```
+
+That skips nodes that don't have the `esql.mv_sort` feature.
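+
+Specs can also pin a minimum version directly on the test name with an inline
+skip annotation instead of a `NodeFeature`. A hypothetical sketch (the name,
+query, and reason are illustrative), using the same annotation style you'll
+see throughout these files:
+
+```csv-spec
+lengthSketch#[skip:-8.13.99,reason:illustrative only]
+row a = "hello" | eval l = length(a);
+
+a:keyword | l:integer
+hello | 5
+;
+```
+
+That skips any node at or below version 8.13.99.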
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index a2411cfd7a335..aa6529c2d4319 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -52,6 +52,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSinglePolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) +; + #################################################################################################### # Test against a polygon smaller in size to the Bottom Left polygon @@ -99,6 +122,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSmallerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5); +; + #################################################################################################### # Test against a polygon similar in size to the entire test data @@ -175,6 +221,17 @@ id:l | name:keyword | shape:cartesian_shape 19 | Top left point | POINT(0.5 2.5) ; +whereDisjointLargerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + #################################################################################################### # Test against a polygon larger than all test data @@ -250,3 +307,14 @@ id:l | name:keyword | 
shape:cartesian_shape 18 | Top right point | POINT(2.5 2.5) 19 | Top left point | POINT(0.5 2.5) ; + +whereDisjointEvenLargerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 9b06e9a0a8b23..85b665d717449 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -424,3 +424,69 @@ emp_no:i -10002 -10003 ; + +sortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| SORT emp_no + salary ASC +| EVAL emp_no = -emp_no +| LIMIT 10 +| EVAL sum = -emp_no + salary +| KEEP emp_no, salary, sum +; + + emp_no:i | salary:i | sum:i +-10015 |25324 |35339 +-10035 |25945 |35980 +-10092 |25976 |36068 +-10048 |26436 |36484 +-10057 |27215 |37272 +-10084 |28035 |38119 +-10026 |28336 |38362 +-10068 |28941 |39009 +-10060 |29175 |39235 +-10042 |30404 |40446 +; + +sortConcat1#[skip:-8.13.99,reason:supported in 8.14] +from employees +| sort concat(left(last_name, 1), left(first_name, 1)), salary desc +| keep first_name, last_name, salary +| eval ll = left(last_name, 1), lf = left(first_name, 1) +| limit 10 +; + + first_name:keyword | last_name:keyword | salary:integer|ll:keyword|lf:keyword +Mona |Azuma |46595 |A |M +Satosi |Awdeh |50249 |A |S +Brendon |Bernini |33370 |B |B +Breannda |Billingsley |29175 |B |B +Cristinel |Bouloucos |58715 |B |C +Charlene |Brattka |28941 |B |C +Margareta |Bierman |41933 |B |M +Mokhtar |Bernatsky |38992 |B |M +Parto |Bamford |61805 |B |P +Premal |Baek |52833 |B |P +; + +sortConcat2#[skip:-8.13.99,reason:supported in 8.14] +from employees +| eval ln = last_name, fn = first_name, concat = concat(left(last_name, 1), left(first_name, 1)) +| sort concat(left(ln, 1), left(fn, 1)), salary desc +| keep f*, l*, salary +| eval c = concat(left(last_name, 1), left(first_name, 1)) +| drop *name, lan* +| limit 10 +; + + fn:keyword | ln:keyword | salary:integer| c:keyword +Mona |Azuma |46595 |AM +Satosi |Awdeh |50249 |AS +Brendon |Bernini |33370 |BB +Breannda |Billingsley |29175 |BB +Cristinel |Bouloucos |58715 |BC +Charlene |Brattka |28941 |BC +Margareta |Bierman |41933 |BM +Mokhtar |Bernatsky |38992 |BM +Parto |Bamford |61805 |BP +Premal |Baek |52833 |BP +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index 11fb0ab532945..c2c0b82f1a664 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -133,7 +133,7 @@ convertFromDatetimeWithOptions required_feature: esql.from_options // tag::convertFromDatetimeWithOptions[] - FROM employees OPTIONS "allow_no_indices"="false","preference"="_shards:0" + FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" | SORT emp_no | EVAL hire_double = to_double(hire_date) | KEEP emp_no, hire_date, hire_double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec index facf06eb6a960..14a3807b8729c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec @@ -280,10 +280,10 @@ avg_salary:double | x:double ; averageOfEvalValue -from employees | eval ratio = salary / height | stats avg(ratio); +from employees | eval ratio = salary / height | stats avg = avg(ratio) | eval avg = round(avg, 8); -avg(ratio):double -27517.279737149947 +avg:double +27517.27973715 ; simpleWhere diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 746684aca3e38..492da4ee5ef36 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,7 +7,7 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "double avg(number:double|integer|long)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" @@ -22,7 +22,7 @@ synopsis:keyword "long date_extract(datePart:keyword|text, date:date)" "keyword date_format(?dateFormat:keyword|text, date:date)" "date date_parse(?datePattern:keyword|text, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" +"date date_trunc(interval:date_period|time_duration, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" @@ -30,6 +30,7 @@ double e() "integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" +"integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" "double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" "double log10(number:double|integer|long|unsigned_long)" "keyword|text ltrim(string:keyword|text)" @@ -65,6 +66,7 @@ double pi() "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" "boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" @@ -115,7 +117,7 @@ acos |number |"double|integer|long|unsigne asin 
|number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] +auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] avg |number |"double|integer|long" |[""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. @@ -130,7 +132,7 @@ date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era., Date expression] date_format |[dateFormat, date] |["keyword|text", date] |[A valid date pattern, Date expression] date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[A valid date pattern, A string representing a date] -date_trunc |[interval, date] |[keyword, date] |[Interval; expressed using the timespan literal syntax., Date expression] +date_trunc |[interval, date] |["date_period|time_duration", date] |[Interval; expressed using the timespan literal syntax., Date expression] e |null |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] floor |number |"double|integer|long|unsigned_long" |[""] @@ -138,6 +140,7 @@ greatest |first |"integer|long|double|boolean least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |[""] +locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["Base of logarithm. If `null`\, the function returns `null`. If not provided\, this function returns the natural logarithm (base e) of a value.", "Numeric expression. If `null`\, the function returns `null`."] log10 |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. 
ltrim |string |"keyword|text" |[""] @@ -173,6 +176,7 @@ split |[string, delim] |["keyword|text", "keyword|te sqrt |number |"double|integer|long|unsigned_long" |[""] st_centroid |field |"geo_point|cartesian_point" |[""] st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_x |point |"geo_point|cartesian_point" |[""] @@ -247,6 +251,7 @@ greatest |Returns the maximum value from many columns. least |Returns the minimum value from many columns. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. +locate |Returns an integer that indicates the position of a keyword substring within another string log |Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning. log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. ltrim |Removes leading whitespaces from a string. @@ -282,6 +287,7 @@ split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. st_centroid |The centroid of a spatial field. st_contains |Returns whether the first geometry contains the second geometry. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. st_intersects |Returns whether the two geometries or geometry columns intersect. st_within |Returns whether the first geometry is within the second geometry. st_x |Extracts the x-coordinate from a point geometry. 
@@ -357,6 +363,7 @@ greatest |"integer|long|double|boolean|keyword|text|ip|version" least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false +locate |integer |[false, false, true] |false |false log |double |[true, false] |false |false log10 |double |false |false |false ltrim |"keyword|text" |false |false |false @@ -392,6 +399,7 @@ split |keyword sqrt |double |false |false |false st_centroid |"geo_point|cartesian_point" |false |false |true st_contains |boolean |[false, false] |false |false +st_disjoint |boolean |[false, false] |false |false st_intersects |boolean |[false, false] |false |false st_within |boolean |[false, false] |false |false st_x |double |false |false |false @@ -447,5 +455,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -100 | 100 | 100 +102 | 102 | 102 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index c1421d91dffa5..843b2674967fe 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -605,6 +605,91 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; +############################################### +# Tests for ST_DISJOINT on GEO_POINT type + +literalPolygonDisjointLiteralPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +pointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports +| WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 100), y = FLOOR(y / 100) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 94 +-1 | -1 | 67 +-1 | 0 | 201 +0 | 0 | 15 +1 | -1 | 33 +1 | 0 | 53 +; + +airportCityLocationPointDisjointCentroid +required_feature: esql.st_disjoint + +FROM airports_mp +| WHERE ST_DISJOINT(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (67.8581917192787 
24.02956652920693) | POINT (67.81638333333332 24.048999999999996) | 6 +; + ############################################### # Tests for ST_CONTAINS on GEO_POINT type @@ -1167,6 +1252,148 @@ centroid:cartesian_point | count:long POINT (4783520.5 1661010.0) | 1 ; +cartesianPointIntersectsLiteralPolygonCount +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +444 +; + +############################################### +# Tests for ST_DISJOINT on CARTESIAN_POINT type + +literalPolygonDisjointLiteralCartesianPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalCartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralCartesianPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_CARTESIANPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalCartesianPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +cartesianPointDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +405 +; + +cartesianPointIntersectsDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL disjoint = ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() BY intersects, disjoint +| SORT intersects DESC, disjoint DESC +| KEEP intersects, disjoint, count +; + +intersects:boolean | disjoint:boolean | count:long +true | false | 444 +false | true | 405 +; + +cartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 10000000), y = FLOOR(y / 10000000) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 136 +-2 | 1 | 3 +-1 | -1 | 64 +-1 | 0 | 192 +-1 | 1 | 2 +; + +cartesianPointDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, 
TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + +cartesianPointDisjointInvalidGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"Invalid Geometry\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: invalid + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS on CARTESIAN_POINT type diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index f010ed13370e0..6d0d15c398986 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -213,6 +213,27 @@ intersects:boolean true ; +############################################### +# Tests for ST_DISJOINT with GEO_SHAPE + +polygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +// tag::st_disjoint-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_disjoint-airport_city_boundaries[] +| SORT abbrev +| LIMIT 1 +; + +// tag::st_disjoint-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco de Juárez | POINT (-99.8825 16.8636) +// end::st_disjoint-airport_city_boundaries-result[] +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE @@ -422,6 +443,37 @@ wkt:keyword | shape:ca "POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) ; +############################################### +# Tests for ST_DISJOINT with CARTESIAN_SHAPE + +cartesianPolygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +| LIMIT 1 +; + +id:keyword | name:keyword | shape:cartesian_shape +ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -1760356.671722378, -2561396.0054164226) +; + +cartesianPolygonDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:28: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:28: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 70d5053c64c45..867ff127c90e8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -137,6 +137,7 @@ avgOfDouble FROM employees | STATS AVG(height) // end::avg[] +| EVAL `AVG(height)` = ROUND(`AVG(height)`, 5) ; // tag::avg-result[] @@ -159,7 +160,7 @@ h:double 1.76818359375 ; avgOfScaledFloat -from employees | stats h = avg(height.scaled_float); +from employees | stats h = avg(height.scaled_float) | eval h = round(h, 4); h:double 1.7682 @@ -1025,13 +1026,13 @@ c:long | cd:long docsStatsAvgNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] // tag::docsStatsAvgNestedExpression[] FROM employees -| STATS avg_salary_change = AVG(MV_AVG(salary_change)) +| STATS avg_salary_change = ROUND(AVG(MV_AVG(salary_change)), 10) // end::docsStatsAvgNestedExpression[] ; // tag::docsStatsAvgNestedExpression-result[] avg_salary_change:double -1.3904535864978902 +1.3904535865 // end::docsStatsAvgNestedExpression-result[] ; @@ -1550,3 +1551,61 @@ s2point1:d | s_mv:i | languages:i 2.1 | 3 | 5 2.1 | 3 | null ; + +evalOverridingKey +FROM employees +| EVAL k = languages +| STATS c = COUNT() BY languages, k +| DROP k +| SORT languages +; + +c:l| languages:i +15 | 1 +19 | 2 +17 | 3 +18 | 4 +21 | 5 +10 | null +; + +evalMultipleOverridingKeys#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| EVAL k = languages, k1 = k +| STATS c = COUNT() BY languages, k, k1, languages +| DROP k +| SORT languages +; + +c:l | k1:i | languages:i +15 | 1 | 1 +19 | 2 | 2 +17 | 3 | 3 +18 | 4 | 4 +21 | 5 | 5 +10 | null | null +; + +minWithSortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + languages; + + min:i | languages:i +25324 |5 +25976 |1 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; + +minWithSortExpression2#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + CASE(languages == 5, 655, languages); + + min:i | languages:i +25976 |1 +25324 |5 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index d9c9e535c2c45..5a81a05cee143 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -172,6 +172,13 @@ emp_no:integer | last_name:keyword | x:keyword | z:keyword 10010 | Piveteau | P | a ; +substring Emoji#[skip:-8.13.99,reason:bug fix in 8.14] +row a = "🐱Meow!🐶Woof!" | eval sub1 = substring(a, 2) | eval sub2 = substring(a, 2, 100); + +a:keyword | sub1:keyword | sub2:keyword +🐱Meow!🐶Woof! | Meow!🐶Woof! | Meow!🐶Woof! 
+; + ltrim from employees | sort emp_no | limit 10 | eval name = concat(" ", first_name, " ") | eval name = ltrim(name) | eval name = concat("'", name, "'") | keep emp_no, name; @@ -792,7 +799,7 @@ emp_no:integer | full_name:keyword | full_name_2:keyword | job_positions:keyword ; showTextFields -from hosts | where host == "beta" | keep host, host_group, description; +from hosts | sort description, card, ip0, ip1 | where host == "beta" | keep host, host_group, description; ignoreOrder:true host:keyword | host_group:text | description:text @@ -1168,3 +1175,115 @@ from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | gender:keyword | split:keyword M | [foo, bar] ; + +locate#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll"); + +a:keyword | a_ll:integer +hello | 3 +; + +locateFail#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "int"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateZeroStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 0); + +a:keyword | a_ll:integer +hello | 3 +; + +locateExactStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 3); + +a:keyword | a_ll:integer +hello | 3 +; + +locateLongerStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 10); + +a:keyword | a_ll:integer +hello | 0 +; + +locateLongerSubstr#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "farewell"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateSame#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "hello"); + +a:keyword | a_ll:integer +hello | 1 +; + +locateWithSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_s = substring(last_name, 2) | eval f_l = locate(last_name, f_s) | keep emp_no, last_name, f_s, f_l; +ignoreOrder:true + +emp_no:integer | last_name:keyword | f_s:keyword | f_l:integer +10001 | Facello | acello | 2 +10002 | Simmel | immel | 2 +10003 | Bamford | amford | 2 +10004 | Koblick | oblick | 2 +10005 | Maliniak | aliniak | 2 +10006 | Preusig | reusig | 2 +10007 | Zielinski | ielinski | 2 +10008 | Kalloufi | alloufi | 2 +10009 | Peac | eac | 2 +10010 | Piveteau | iveteau | 2 +; + +locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 2) | eval f_l = locate(a, f_s); + +a:keyword | f_s:keyword | f_l:integer +🐱Meow!🐶Woof! | Meow!🐶Woof! 
| 2 +; + +locateNestedSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = substring(a, locate(a, "ll")); + +a:keyword | a_ll:keyword +hello | llo +; + +locateNestSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(substring(a, 2), "ll"); + +a:keyword | a_ll:integer +hello | 2 +; + +locateStats#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_l = locate(last_name, "ll") | stats min(f_l), max(f_l) by job_positions | sort job_positions | limit 5; + +min(f_l):integer | max(f_l):integer | job_positions:keyword +5 | 5 | Accountant +0 | 0 | Architect +0 | 0 | Head Human Resources +0 | 3 | Internship +3 | 3 | Junior Developer +; + +locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] +required_feature: esql.mv_warn + +from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; +ignoreOrder:true +warning:Line 1:80: evaluation of [locate(description, \"ate\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:80: java.lang.IllegalArgumentException: single-value function encountered multi-value + +l1:integer | l2:integer +2 | null +2 | null +null | 0 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 17082e9855761..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -994,29 +994,19 @@ public void testOverlappingIndexPatterns() throws Exception { .add(new IndexRequest("test_overlapping_index_patterns_2").id("1").source("field", "foo")) .get(); - assertVerificationException("from test_overlapping_index_patterns_* | sort field"); + assertThrows(VerificationException.class, () -> run("from test_overlapping_index_patterns_* | sort field")); } public void testErrorMessageForUnknownColumn() { - var e = assertVerificationException("row a = 1 | eval x = b"); + var e = expectThrows(VerificationException.class, () -> run("row a = 1 | eval x = b")); assertThat(e.getMessage(), containsString("Unknown column [b]")); } - // Straightforward verification. Subclasses can override. - protected Exception assertVerificationException(String esqlCommand) { - return expectThrows(VerificationException.class, () -> run(esqlCommand)); - } - public void testErrorMessageForEmptyParams() { - var e = assertParsingException("row a = 1 | eval x = ?"); + var e = expectThrows(ParsingException.class, () -> run("row a = 1 | eval x = ?")); assertThat(e.getMessage(), containsString("Not enough actual parameters 0")); } - // Straightforward verification. Subclasses can override. 
- protected Exception assertParsingException(String esqlCommand) { - return expectThrows(ParsingException.class, () -> run(esqlCommand)); - } - public void testEmptyIndex() { assertAcked(client().admin().indices().prepareCreate("test_empty").setMapping("k", "type=keyword", "v", "type=long").get()); try (EsqlQueryResponse results = run("from test_empty")) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index e884b67fb5d24..e2e635917ed1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -25,8 +24,6 @@ import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; -import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.nio.file.Path; @@ -37,7 +34,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; @@ -122,26 +118,6 @@ AcknowledgedResponse deleteAsyncId(String id) { } } - // Overridden to allow for not-serializable wrapper. - @Override - protected Exception assertVerificationException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, VerificationException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("verification_exception")); - } - return e; - } - - // Overridden to allow for not-serializable wrapper. 
- @Override - protected Exception assertParsingException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, ParsingException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("parsing_exception")); - } - return e; - } - public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin { public LocalStateEsqlAsync(final Settings settings, final Path configPath) { super(settings, configPath); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java new file mode 100644 index 0000000000000..0427afb6d80c8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Booleans. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBoolean extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + BooleanBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getBoolean(firstValueIndex + v); + } + } + } + + private boolean[] extendCell(boolean[] oldCell, int newValueCount) { + if (oldCell == null) { + return new boolean[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBoolean(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java new file mode 100644 index 0000000000000..ff881da5baf44 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for BytesRefs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBytesRef extends EnrichResultBuilder { + private final BytesRefArray bytes; // shared between all cells + private ObjectArray cells; + + EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } + } + + @Override + void addInputPage(IntVector positions, Page page) { + BytesRefBlock block = page.getBlock(channel); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? 
oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(totalPositions)) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBytesRef(bytes.get(v, scratch)); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(bytes, cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java new file mode 100644 index 0000000000000..93c178d816326 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Doubles. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForDouble extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + DoubleBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getDouble(firstValueIndex + v); + } + } + } + + private double[] extendCell(double[] oldCell, int newValueCount) { + if (oldCell == null) { + return new double[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendDouble(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java new file mode 100644 index 0000000000000..4dec877e0d1e4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Ints. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForInt extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForInt(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + IntBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getInt(firstValueIndex + v); + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendInt(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java new file mode 100644 index 0000000000000..0dd4d1d0a8a0d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Longs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForLong extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForLong(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + LongBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getLong(firstValueIndex + v); + } + } + } + + private long[] extendCell(long[] oldCell, int newValueCount) { + if (oldCell == null) { + return new long[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendLong(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..62b5761cfd655 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..4f11da3c474a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory 
leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..adb5a33b83f3b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + 
leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..186a1299a4a98 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return 
"SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..675b6cc58197e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..1b22e67d11b25 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + 
} + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..1df80cf90bd10 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + 
} + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..9bdc60813ad67 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. + */ +public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if 
(leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java new file mode 100644 index 0000000000000..24055ad44f624 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -0,0 +1,166 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
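[Editor's aside, not part of the patch: every generated evaluator in this change (the SpatialDisjoint evaluators above and the Locate evaluators that follow) shares one shape: eval(Page) tries a vector fast path, and a block slow path guards each position. A minimal hand-written sketch of that pattern, assuming only the compute-engine calls already visible in the generated code; this is an illustration, not generator output.]

    import org.elasticsearch.compute.data.BytesRefBlock;
    import org.elasticsearch.compute.data.BytesRefVector;

    final class EvaluatorDispatchSketch {
        // asVector() is non-null only when every position holds exactly one non-null value,
        // so the vector overload can drop the per-position guards entirely.
        static boolean canUseVectorFastPath(BytesRefBlock block) {
            BytesRefVector vector = block.asVector();
            return vector != null;
        }

        // The block (slow) path checks each position: a null input yields a null output, and a
        // multi-valued input yields null after registering the
        // "single-value function encountered multi-value" warning instead of failing the query.
        static boolean isProcessablePosition(BytesRefBlock block, int p) {
            return block.isNull(p) == false && block.getValueCount(p) == 1;
        }
    }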
+package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. + */ +public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final EvalOperator.ExpressionEvaluator start; + + private final DriverContext driverContext; + + public LocateEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, EvalOperator.ExpressionEvaluator start, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.start = start; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + try (IntBlock startBlock = (IntBlock) start.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + IntVector startVector = startBlock.asVector(); + if (startVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + return eval(page.getPositionCount(), strVector, substrVector, startVector).asBlock(); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock, + IntBlock startBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startBlock.getValueCount(p) != 1) { + if 
(startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch), startBlock.getInt(startBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector, + IntVector startVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr, start); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + private final EvalOperator.ExpressionEvaluator.Factory start; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr, + EvalOperator.ExpressionEvaluator.Factory start) { + this.source = source; + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public LocateEvaluator get(DriverContext context) { + return new LocateEvaluator(source, str.get(context), substr.get(context), start.get(context), context); + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java new file mode 100644 index 0000000000000..947b1ecb49d0c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
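[Editor's aside, not part of the patch: both Locate evaluators delegate to Locate.process, whose semantics are defined in Locate.java further down: results are 1-based, a start of 0 or 1 means the beginning of the string, a negative start counts back from the end, and 0 means the substring was not found. A plain-JDK sketch of those rules under the simplifying assumption of ASCII input; the real implementation counts Unicode code points.]

    final class LocateSketch {
        static int locate(String str, String substr, int start) {
            // 1-based start; 0 and 1 both mean "from the beginning", negative is relative to the end
            int from = start > 0 ? start - 1 : start < 0 ? str.length() + start : 0;
            from = Math.min(Math.max(0, from), str.length()); // clamp into [0, length]
            int idx = str.indexOf(substr, from);
            return idx == -1 ? 0 : idx + 1; // 1-based result, 0 when absent
        }

        public static void main(String[] args) {
            System.out.println(locate("hello", "ll", 0));  // 3
            System.out.println(locate("hello", "l", -2));  // 4: search starts at the second 'l'
            System.out.println(locate("hello", "zz", 1));  // 0: not found
        }
    }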
+package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. + */ +public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final DriverContext driverContext; + + public LocateNoStartEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + return eval(page.getPositionCount(), strVector, substrVector).asBlock(); + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for 
(int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr) { + this.source = source; + this.str = str; + this.substr = substr; + } + + @Override + public LocateNoStartEvaluator get(DriverContext context) { + return new LocateNoStartEvaluator(source, str.get(context), substr.get(context), context); + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 77120c757e97a..e5d4e58d9d61b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -37,7 +36,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; -import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; @@ -318,22 +317,10 @@ private void doLookup( 0 ) ); - - // drop docs block - intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); - boolean singleLeaf = searchContext.searcher().getLeafContexts().size() == 1; - // merging field-values by position - final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); + final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); intermediateOperators.add( - new MergePositionsOperator( - singleLeaf, - inputPage.getPositionCount(), - 0, - mergingChannels, - mergingTypes, - driverContext.blockFactory() - ) + new MergePositionsOperator(inputPage.getPositionCount(), 1, mergingChannels, mergingTypes, driverContext.blockFactory()) ); AtomicReference<Page> result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); @@ -392,17 +379,6 @@ private Page createNullResponse(int positionCount, List<NamedExpression> extract } } - private static Operator droppingBlockOperator(int totalBlocks, int droppingPosition) { - var size = totalBlocks - 1; - var projection = new ArrayList<Integer>(size); - for (int i = 0; i < totalBlocks; i++) { - if (i != droppingPosition) { - projection.add(i); - } - } - return new ProjectOperator(projection); - } - private class TransportHandler implements TransportRequestHandler<LookupRequest> { @Override public void messageReceived(LookupRequest request, TransportChannel channel, Task task) { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java new file mode 100644 index 0000000000000..5bb42f3090695 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; + +/** + * An abstract class responsible for collecting values for an output block of enrich. + * The incoming values of the same position are combined and added to a single corresponding position. + */ +abstract class EnrichResultBuilder implements Releasable { + protected final BlockFactory blockFactory; + protected final int channel; + protected final int totalPositions; + private long usedBytes; + + EnrichResultBuilder(BlockFactory blockFactory, int channel, int totalPositions) { + this.blockFactory = blockFactory; + this.channel = channel; + this.totalPositions = totalPositions; + } + + /** + * Collects the input values from the input page. + * + * @param positions the positions vector + * @param page the input page. The block located at {@code channel} is the value block + */ + abstract void addInputPage(IntVector positions, Page page); + + abstract Block build(); + + final void adjustBreaker(long bytes) { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "<<enrich-result-builder>>"); + usedBytes += bytes; + } + + @Override + public void close() { + blockFactory.breaker().addWithoutBreaking(-usedBytes); + } + + static EnrichResultBuilder enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel, int totalPositions) { + return switch (elementType) { + case NULL -> new EnrichResultBuilderForNull(blockFactory, channel, totalPositions); + case INT -> new EnrichResultBuilderForInt(blockFactory, channel, totalPositions); + case LONG -> new EnrichResultBuilderForLong(blockFactory, channel, totalPositions); + case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel, totalPositions); + case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel, totalPositions); + case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel, totalPositions); + default -> throw new IllegalArgumentException("no enrich result builder for [" + elementType + "]"); + }; + } + + private static class EnrichResultBuilderForNull extends EnrichResultBuilder { + EnrichResultBuilderForNull(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + assert page.getBlock(channel).areAllValuesNull() : "expected all nulls; but got values"; + } + + @Override + Block build() { + return blockFactory.newConstantNullBlock(totalPositions); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index 89447807db5b9..a3b7a8be61e2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.util.Arrays; +import java.util.Objects; /** * Combines values at the given blocks with the same positions into a single position for the blocks at the given channels @@ -44,21 +45,13 @@ */ final class MergePositionsOperator implements Operator { private boolean finished = false; - private int filledPositions = 0; - private final boolean singleMode; - private final int positionCount; private final int positionChannel; - private final Block.Builder[] outputBuilders; - private final int[] mergingChannels; - private final ElementType[] mergingTypes; - private PositionBuilder positionBuilder = null; + private final EnrichResultBuilder[] builders; private Page outputPage; - private final BlockFactory blockFactory; MergePositionsOperator( - boolean singleMode, int positionCount, int positionChannel, int[] mergingChannels, @@ -73,123 +66,51 @@ final class MergePositionsOperator implements Operator { + Arrays.toString(mergingTypes) ); } - this.blockFactory = blockFactory; - this.singleMode = singleMode; - this.positionCount = positionCount; this.positionChannel = positionChannel; - this.mergingChannels = mergingChannels; - this.mergingTypes = mergingTypes; - this.outputBuilders = new Block.Builder[mergingTypes.length]; + this.builders = new EnrichResultBuilder[mergingTypes.length]; try { for (int i = 0; i < mergingTypes.length; i++) { - outputBuilders[i] = mergingTypes[i].newBlockBuilder(positionCount, blockFactory); + builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i], positionCount); } } finally { - if (outputBuilders[outputBuilders.length - 1] == null) { - Releasables.close(outputBuilders); + if (builders[builders.length - 1] == null) { + Releasables.close(builders); } } } @Override public boolean needsInput() { - return true; + return finished == false; } @Override public void addInput(Page page) { try { final IntBlock positions = page.getBlock(positionChannel); - final int currentPosition = positions.getInt(0); - if (singleMode) { - fillNullUpToPosition(currentPosition); - for (int i = 0; i < mergingChannels.length; i++) { - int channel = mergingChannels[i]; - outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); - } - filledPositions++; - } else { - if (positionBuilder != null && positionBuilder.position != currentPosition) { - flushPositionBuilder(); - } - if (positionBuilder == null) { - positionBuilder = new PositionBuilder(currentPosition, mergingTypes, blockFactory); - } - positionBuilder.combine(page, mergingChannels); + final IntVector positionsVector = Objects.requireNonNull(positions.asVector(), "positions must be a vector"); + for (EnrichResultBuilder builder : builders) { + builder.addInputPage(positionsVector, page); } } finally { 
Releasables.closeExpectNoException(page::releaseBlocks); } } - static final class PositionBuilder implements Releasable { - private final int position; - private final Block.Builder[] builders; - - PositionBuilder(int position, ElementType[] elementTypes, BlockFactory blockFactory) { - this.position = position; - this.builders = new Block.Builder[elementTypes.length]; - try { - for (int i = 0; i < builders.length; i++) { - builders[i] = elementTypes[i].newBlockBuilder(1, blockFactory); - } - } finally { - if (builders[builders.length - 1] == null) { - Releasables.close(builders); - } - } - } - - void combine(Page page, int[] channels) { - for (int i = 0; i < channels.length; i++) { - Block block = page.getBlock(channels[i]); - builders[i].appendAllValuesToCurrentPosition(block); - } - } - - void buildTo(Block.Builder[] output) { - for (int i = 0; i < output.length; i++) { - try (var b = builders[i]; Block block = b.build()) { - output[i].appendAllValuesToCurrentPosition(block); - } + @Override + public void finish() { + final Block[] blocks = new Block[builders.length]; + try { + for (int i = 0; i < builders.length; i++) { + blocks[i] = builders[i].build(); } - } - - @Override - public void close() { - Releasables.close(builders); - } - } - - private void flushPositionBuilder() { - fillNullUpToPosition(positionBuilder.position); - filledPositions++; - try (var p = positionBuilder) { - p.buildTo(outputBuilders); + outputPage = new Page(blocks); } finally { - positionBuilder = null; - } - } - - private void fillNullUpToPosition(int position) { - while (filledPositions < position) { - for (Block.Builder builder : outputBuilders) { - builder.appendNull(); + finished = true; + if (outputPage == null) { + Releasables.close(blocks); } - filledPositions++; - } - } - - @Override - public void finish() { - if (positionBuilder != null) { - flushPositionBuilder(); } - fillNullUpToPosition(positionCount); - final Block[] blocks = Block.Builder.buildAll(outputBuilders); - outputPage = new Page(blocks); - assert outputPage.getPositionCount() == positionCount; - finished = true; } @Override @@ -206,7 +127,7 @@ public Page getOutput() { @Override public void close() { - Releasables.close(Releasables.wrap(outputBuilders), positionBuilder, () -> { + Releasables.close(Releasables.wrap(builders), () -> { if (outputPage != null) { outputPage.releaseBlocks(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st new file mode 100644 index 0000000000000..4c5c9fabfa797 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +$else$ +import org.apache.lucene.util.RamUsageEstimator; +$endif$ +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(long)$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.$Type$Block; +$else$ +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntVector; +$endif$ +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for $Type$s. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderFor$Type$ extends EnrichResultBuilder { +$if(BytesRef)$ + private final BytesRefArray bytes; // shared between all cells +$endif$ + private ObjectArray<$if(BytesRef)$int$else$$type$$endif$[]> cells; + + EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); +$if(BytesRef)$ + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } +$endif$ + } + + @Override + void addInputPage(IntVector positions, Page page) { + $Type$Block block = page.getBlock(channel); +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); +$if(BytesRef)$ + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } +$else$ + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.get$Type$(firstValueIndex + v); + } +$endif$ + } + } + + private $if(BytesRef)$int$else$$type$$endif$[] extendCell($if(BytesRef)$int$else$$type$$endif$[] oldCell, int newValueCount) { + if (oldCell == null) { + return new $if(BytesRef)$int$else$$type$$endif$[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(totalPositions)) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { +$if(BytesRef)$ + builder.appendBytesRef(bytes.get(v, scratch)); +$else$ + builder.append$Type$(v); +$endif$ + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close($if(BytesRef)$bytes, $endif$cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 1a27c7b69c1e6..178c714950b05 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -81,6 +81,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; @@ -90,6 +91,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; @@ -174,7 +176,8 @@ private FunctionDefinition[][] functions() { def(StartsWith.class, StartsWith::new, "starts_with"), def(EndsWith.class, EndsWith::new, "ends_with"), def(ToLower.class, ToLower::new, "to_lower"), - def(ToUpper.class, ToUpper::new, "to_upper") }, + def(ToUpper.class, ToUpper::new, "to_upper"), + def(Locate.class, Locate::new, "locate") 
}, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), @@ -187,6 +190,7 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), def(SpatialContains.class, SpatialContains::new, "st_contains"), + def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), def(SpatialWithin.class, SpatialWithin::new, "st_within"), def(StX.class, StX::new, "st_x"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java index 0cee9d2c53cde..7c9a788eed36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -18,6 +18,12 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface Example { + + /** + * The description that will appear before the example + */ + String description() default ""; + /** * The test file that contains the example. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0f35b95a287ad..39ad0351b199f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -34,14 +35,26 @@ public class DateTrunc extends BinaryDateTimeFunction implements EvaluatorMapper { - @FunctionInfo(returnType = "date", description = "Rounds down a date to the closest interval.") + @FunctionInfo( + returnType = "date", + description = "Rounds down a date to the closest interval.", + examples = { + @Example(file = "date", tag = "docsDateTrunc"), + @Example( + description = "Combine `DATE_TRUNC` with <<esql-stats-by>> to create date histograms. For\n" + + "example, the number of hires per year:", + file = "date", + tag = "docsDateTruncHistogram" + ), + @Example(description = "Or an hourly error rate:", file = "conditional", tag = "docsCaseHourlyErrorRate") } + ) public DateTrunc( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial @Param( name = "interval", - type = { "keyword" }, + type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, @Param(name = "date", type = { "date" }, description = "Date expression") Expression field @@ -55,8 +68,8 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - return isDate(timestampField(), sourceText(), FIRST).and( - isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration") + return isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), FIRST, "dateperiod", "timeduration").and( + isDate(timestampField(), sourceText(), SECOND) ); } @@ -105,7 +118,7 @@ private static Rounding.Prepared createRounding(final Period period, final ZoneI long periods = period.getUnits().stream().filter(unit -> period.get(unit) != 0).count(); if (periods != 1) { - throw new IllegalArgumentException("Time interval is not supported"); + throw new IllegalArgumentException("Time interval with multiple periods is not supported"); } final Rounding.Builder rounding; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index b9aeff7f1d935..ea581437f6c4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -90,8 +90,8 @@ public AutoBucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date", "string" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date", "string" }) Expression to + @Param(name = "from", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression to ) { super(source, List.of(field, buckets, from, to)); this.field = field; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 2a4915f38fb48..279f31e34ac95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -112,8 +112,6 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry contains the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java new file mode 100644 index 0000000000000..7833f93b6270f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_DISJOINT. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_DISJOINT and QueryRelation.DISJOINT. 
+ */ +public class SpatialDisjoint extends SpatialRelatesFunction { + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Disjoint") + ); + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Disjoint") + ); + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the two geometries or geometry columns are disjoint.", + examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") + ) + public SpatialDisjoint( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + private SpatialDisjoint(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.DISJOINT; + } + + @Override + public SpatialDisjoint withDocValues(Set<FieldAttribute> attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialDisjoint(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialDisjoint replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialDisjoint(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo<SpatialDisjoint> info() { + return NodeInfo.create(this, SpatialDisjoint::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialDisjointGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef 
leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 93965b0d3e9be..810e3206ada73 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -66,8 +66,6 @@ public class SpatialIntersects extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial", tag = "st_intersects-airports") ) public SpatialIntersects( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index b18a3ba4926f4..51109aee29482 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -116,10 +116,14 @@ protected TypeResolution resolveType( if (resolution.unresolved()) { return resolution; } - crsType = SpatialCrsType.fromDataType(spatialExpression.dataType()); + setCrsType(spatialExpression.dataType()); return TypeResolution.TYPE_RESOLVED; } + protected void setCrsType(DataType dataType) { + crsType = SpatialCrsType.fromDataType(dataType); + } + public static TypeResolution isSameSpatialType( DataType spatialDataType, Expression expression, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index a5ade4cfeb73c..ca285ca07e27b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -67,8 +67,6 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry is within the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. " - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java new file mode 100644 index 0000000000000..c8b546718aabf --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.UnicodeUtil; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +/** + * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. + */ +public class Locate extends EsqlScalarFunction implements OptionalArgument { + + private final Expression str; + private final Expression substr; + private final Expression start; + + @FunctionInfo( + returnType = "integer", + description = "Returns an integer that indicates the position of a keyword substring within another string" + ) + public Locate( + Source source, + @Param(name = "string", type = { "keyword", "text" }, description = "An input string") Expression str, + @Param( + name = "substring", + type = { "keyword", "text" }, + description = "A substring to locate in the input string" + ) Expression substr, + @Param(optional = true, name = "start", type = { "integer" }, description = "The start index") Expression start + ) { + super(source, start == null ? Arrays.asList(str, substr) : Arrays.asList(str, substr, start)); + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public DataType dataType() { + return DataTypes.INTEGER; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isString(str, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + resolution = isString(substr, sourceText(), SECOND); + if (resolution.unresolved()) { + return resolution; + } + + return start == null ? 
TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + } + + @Override + public boolean foldable() { + return str.foldable() && substr.foldable() && (start == null || start.foldable()); + } + + @Evaluator + static int process(BytesRef str, BytesRef substr, int start) { + if (str == null || substr == null || str.length < substr.length) { + return 0; + } + int codePointCount = UnicodeUtil.codePointCount(str); + int indexStart = indexStart(codePointCount, start); + String utf8ToString = str.utf8ToString(); + int idx = utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + if (idx == -1) { + return 0; + } + return 1 + utf8ToString.codePointCount(0, idx); + } + + @Evaluator(extraName = "NoStart") + static int process(BytesRef str, BytesRef substr) { + return process(str, substr, 0); + } + + private static int indexStart(int codePointCount, int start) { + // esql is 1-based when it comes to string manipulation. We treat start = 0 and 1 the same + // a negative value is relative to the end of the string + int indexStart; + if (start > 0) { + indexStart = start - 1; + } else if (start < 0) { + indexStart = codePointCount + start; // start is negative, so this is a subtraction + } else { + indexStart = start; // start == 0 + } + return Math.min(Math.max(0, indexStart), codePointCount); // sanitise string start index + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Locate(source(), newChildren.get(0), newChildren.get(1), start == null ? null : newChildren.get(2)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Locate::new, str, substr, start); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); + ExpressionEvaluator.Factory substrExpr = toEvaluator.apply(substr); + if (start == null) { + return new LocateNoStartEvaluator.Factory(source(), strExpr, substrExpr); + } + return new LocateEvaluator.Factory(source(), strExpr, substrExpr, toEvaluator.apply(start)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 3bd7d660352c3..a1f2586f4faed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -86,12 +86,9 @@ public boolean foldable() { @Evaluator(extraName = "NoLength") static BytesRef process(BytesRef str, int start) { - if (str.length == 0) { - return null; - } - int codePointCount = UnicodeUtil.codePointCount(str); - int indexStart = indexStart(codePointCount, start); - return new BytesRef(str.utf8ToString().substring(indexStart)); + int length = str.length; // we just need a value at least the length of the string + return process(str, start, length); + } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index a85ddac532241..27e3c95bd123a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -103,6 +103,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -113,6 +114,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; @@ -397,9 +399,11 @@ public static List namedTypeEntries() { of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialDisjoint.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readDisjoint), of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), + of(ScalarFunction.class, Locate.class, PlanNamedTypes::writeLocate, PlanNamedTypes::readLocate), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), @@ -1502,6 +1506,10 @@ static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); } + static SpatialDisjoint readDisjoint(PlanStreamInput in) throws IOException { + return new SpatialDisjoint(Source.EMPTY, in.readExpression(), in.readExpression()); + } + static SpatialContains readContains(PlanStreamInput in) throws IOException { return new SpatialContains(Source.EMPTY, in.readExpression(), in.readExpression()); } @@ -1592,6 +1600,19 @@ static void writeSubstring(PlanStreamOutput out, Substring substring) throws IOE out.writeOptionalWriteable(fields.size() == 3 ? 
o -> out.writeExpression(fields.get(2)) : null); } + static Locate readLocate(PlanStreamInput in) throws IOException { + return new Locate(in.readSource(), in.readExpression(), in.readExpression(), in.readOptionalNamed(Expression.class)); + } + + static void writeLocate(PlanStreamOutput out, Locate locate) throws IOException { + out.writeSource(locate.source()); + List fields = locate.children(); + assert fields.size() == 2 || fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); + } + static Replace readReplace(PlanStreamInput in) throws IOException { return new Replace(Source.EMPTY, in.readExpression(), in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index d0375e0b50849..2aaf34a1dd1d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -84,6 +84,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; +import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.SubstituteSurrogates.rawTemporaryName; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN; @@ -125,7 +126,8 @@ protected static Batch substitutions() { new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject(), new SkipQueryOnEmptyMappings(), - new SubstituteSpatialSurrogates() + new SubstituteSpatialSurrogates(), + new ReplaceOrderByExpressionWithEval() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); } @@ -321,6 +323,35 @@ protected SpatialRelatesFunction rule(SpatialRelatesFunction function) { } } + static class ReplaceOrderByExpressionWithEval extends OptimizerRules.OptimizerRule { + private static int counter = 0; + + @Override + protected LogicalPlan rule(OrderBy orderBy) { + int size = orderBy.order().size(); + List evals = new ArrayList<>(size); + List newOrders = new ArrayList<>(size); + + for (int i = 0; i < size; i++) { + var order = orderBy.order().get(i); + if (order.child() instanceof Attribute == false) { + var name = rawTemporaryName("order_by", String.valueOf(i), String.valueOf(counter++)); + var eval = new Alias(order.child().source(), name, order.child()); + newOrders.add(order.replaceChildren(List.of(eval.toAttribute()))); + evals.add(eval); + } else { + newOrders.add(order); + } + } + if (evals.isEmpty()) { + return orderBy; + } else { + var newOrderBy = new OrderBy(orderBy.source(), new Eval(orderBy.source(), orderBy.child(), evals), newOrders); + return new Project(orderBy.source(), newOrderBy, orderBy.output()); + } + } + } + static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule { ConvertStringToByteRef() { @@ -403,11 +434,6 @@ private static List projectAggregations( List upperProjection, List lowerAggregations ) { - AttributeMap lowerAliases = new AttributeMap<>(); - for 
(NamedExpression ne : lowerAggregations) { - lowerAliases.put(ne.toAttribute(), Alias.unwrap(ne)); - } - AttributeSet seen = new AttributeSet(); for (NamedExpression upper : upperProjection) { Expression unwrapped = Alias.unwrap(upper); @@ -431,11 +457,18 @@ private static List combineProjections( List lower ) { - // collect aliases in the lower list - AttributeMap aliases = new AttributeMap<>(); + // collect named expressions declaration in the lower list + AttributeMap namedExpressions = new AttributeMap<>(); + // while also collecting the alias map for resolving the source (f1 = 1, f2 = f1, etc..) + AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lower) { - if ((ne instanceof Attribute) == false) { - aliases.put(ne.toAttribute(), ne); + // record the alias + aliases.put(ne.toAttribute(), Alias.unwrap(ne)); + + // record named expression as is + if (ne instanceof Alias as) { + Expression child = as.child(); + namedExpressions.put(ne.toAttribute(), as.replaceChild(aliases.resolve(child, child))); } } List replaced = new ArrayList<>(); @@ -443,7 +476,7 @@ private static List combineProjections( // replace any matching attribute with a lower alias (if there's a match) // but clean-up non-top aliases at the end for (NamedExpression ne : upper) { - NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> aliases.resolve(a, a)); + NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> namedExpressions.resolve(a, a)); replaced.add((NamedExpression) trimNonTopLevelAliases(replacedExp)); } return replaced; @@ -476,7 +509,10 @@ private List replacePrunedAliasesUsedInGroupBy( var newGroupings = new ArrayList(groupings.size()); for (Expression group : groupings) { - newGroupings.add(group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a))); + var transformed = group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a)); + if (Expressions.anyMatch(newGroupings, g -> Expressions.equalsAsAttribute(g, transformed)) == false) { + newGroupings.add(transformed); + } } return newGroupings; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 29f0e04ef2b94..31c967fc3eee8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -80,6 +80,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within"); + /** + * Support for spatial aggregation {@code ST_DISJOINT}. Done in #107007. + */ + private static final NodeFeature ST_DISJOINT = new NodeFeature("esql.st_disjoint"); + /** * The introduction of the {@code VALUES} agg. 
*/
@@ -108,7 +113,8 @@ public Set<NodeFeature> getFeatures() {
SPATIAL_SHAPES,
ST_CENTROID,
ST_INTERSECTS,
- ST_CONTAINS_WITHIN
+ ST_CONTAINS_WITHIN,
+ ST_DISJOINT
);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
index 8785b8f5de887..afb7ee6f53029 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
@@ -7,20 +7,29 @@ package org.elasticsearch.xpack.esql.plugin;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchWrapperException;
+import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.esql.VerificationException;
import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction;
import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
import org.elasticsearch.xpack.esql.action.EsqlQueryTask;
+import org.elasticsearch.xpack.esql.parser.ParsingException;
import org.elasticsearch.xpack.ql.plugin.AbstractTransportQlAsyncGetResultsAction;
+import org.elasticsearch.xpack.ql.tree.Source;
public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsyncGetResultsAction<EsqlQueryResponse, EsqlQueryTask> {
@@ -51,8 +60,57 @@ public TransportEsqlAsyncGetResultsAction(
this.blockFactory = blockFactory;
}
+ @Override
+ protected void doExecute(Task task, GetAsyncResultRequest request, ActionListener<EsqlQueryResponse> listener) {
+ super.doExecute(task, request, unwrapListener(listener));
+ }
+
@Override
public Writeable.Reader<EsqlQueryResponse> responseReader() {
return EsqlQueryResponse.reader(blockFactory);
}
+
+ static final String PARSE_EX_NAME = ElasticsearchException.getExceptionName(new ParsingException(Source.EMPTY, ""));
+ static final String VERIFY_EX_NAME = ElasticsearchException.getExceptionName(new VerificationException(""));
+
+ /**
+ * Unwraps the exception in the case of failure. This keeps the exception types
+ * the same as the sync API, namely ParsingException and VerificationException.
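+ * <p>
+ * As a sketch of the intended behavior: an async get whose failure arrives as a
+ * {@code NotSerializableExceptionWrapper} whose name matches {@code PARSE_EX_NAME} is rethrown
+ * to the caller as a {@code ParsingException}, with the original wrapper attached as a
+ * suppressed exception so its stack trace is not lost.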
+ */
+ static <R> ActionListener<R> unwrapListener(ActionListener<R> listener) {
+ return new ActionListener<>() {
+ @Override
+ public void onResponse(R o) {
+ listener.onResponse(o);
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ if (e instanceof ElasticsearchWrapperException && e instanceof ElasticsearchException ee) {
+ e = unwrapEsException(ee);
+ }
+ if (e instanceof NotSerializableExceptionWrapper wrapper) {
+ String name = wrapper.getExceptionName();
+ if (PARSE_EX_NAME.equals(name)) {
+ e = new ParsingException(Source.EMPTY, e.getMessage());
+ e.setStackTrace(wrapper.getStackTrace());
+ e.addSuppressed(wrapper);
+ } else if (VERIFY_EX_NAME.equals(name)) {
+ e = new VerificationException(e.getMessage());
+ e.setStackTrace(wrapper.getStackTrace());
+ e.addSuppressed(wrapper);
+ }
+ }
+ listener.onFailure(e);
+ }
+ };
+ }
+
+ static RuntimeException unwrapEsException(ElasticsearchException esEx) {
+ Throwable root = esEx.unwrapCause();
+ if (root instanceof RuntimeException runtimeException) {
+ return runtimeException;
+ }
+ return esEx;
+ }
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java
index a16c227f7f277..30cadb3e19dc8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java
@@ -12,6 +12,8 @@ import org.apache.lucene.document.XYPointField;
import org.apache.lucene.document.XYShape;
import org.apache.lucene.geo.XYGeometry;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
@@ -24,6 +26,7 @@ import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.mapper.GeoShapeQueryable;
import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.SearchExecutionContext;
@@ -221,7 +224,16 @@ org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, M
}
/**
- * This code is based on the ShapeQueryPointProcessor.shapeQuery() method
+ * This code is based on the ShapeQueryPointProcessor.shapeQuery() method, with additional support for two special cases:
+ * <ul>
+ *     <li>
+ *         DISJOINT queries (using {@code EXISTS && !INTERSECTS}, similar to {@code LegacyGeoShapeQueryProcessor.geoShapeQuery()})
+ *     </li>
+ *     <li>
+ *         CONTAINS queries (if the shape is a point, INTERSECTS is used, otherwise a MatchNoDocsQuery is built,
+ *         similar to {@code LatLonPoint.makeContainsGeometryQuery()})
+ *     </li>
+ * </ul>
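+ * <p>
+ * For illustration (the field name is hypothetical): a DISJOINT query on a point field {@code location}
+ * is built below as a {@code BooleanQuery} with {@code exists(location)} as a MUST clause and the
+ * INTERSECTS query as a MUST_NOT clause, since {@code XYPointField} has no native DISJOINT support.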
*/ private static org.apache.lucene.search.Query pointShapeQuery( Geometry geometry, @@ -231,20 +243,28 @@ private static org.apache.lucene.search.Query pointShapeQuery( ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { - // A point field can never contain a non-point geometry - return new MatchNoDocsQuery(); + return new MatchNoDocsQuery("A point field can never contain a non-point geometry"); } final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); - org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + org.apache.lucene.search.Query intersects = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + if (relation == ShapeField.QueryRelation.DISJOINT) { + // XYPointField does not support DISJOINT queries, so we build one as EXISTS && !INTERSECTS + BooleanQuery.Builder bool = new BooleanQuery.Builder(); + org.apache.lucene.search.Query exists = ExistsQueryBuilder.newFilter(context, fieldName, false); + bool.add(exists, BooleanClause.Occur.MUST); + bool.add(intersects, BooleanClause.Occur.MUST_NOT); + return bool.build(); + } + + // Point-Intersects works for all cases except CONTAINS(shape) and DISJOINT, which are handled separately above if (hasDocValues) { final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); - query = new IndexOrDocValuesQuery(query, queryDocValues); + intersects = new IndexOrDocValuesQuery(intersects, queryDocValues); } - return query; + return intersects; } /** @@ -262,8 +282,7 @@ private static org.apache.lucene.search.Query shapeShapeQuery( throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); } if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } final XYGeometry[] luceneGeometries; try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 44066ff3d091d..6ec1af033f86c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -158,8 +159,13 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testKnownVersionIsValid() throws IOException { + public void testKnownStableVersionIsValid() throws IOException { for (EsqlVersion version : EsqlVersion.values()) { + if (version == EsqlVersion.SNAPSHOT) { + // Not stable, skip. Also avoids breaking the CI as this is invalid for non-SNAPSHOT builds. + continue; + } + String validVersionString = randomBoolean() ? 
version.versionStringWithoutEmoji() : version.toString(); String json = String.format(Locale.ROOT, """ @@ -209,23 +215,27 @@ public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { "query": "ROW x = 1" } """, esqlVersion); - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + + String errorOnNonSnapshotBuilds = "[version] with value [" + + esqlVersion + + "] only allowed in snapshot builds, latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]"; + + if (Build.current().isSnapshot()) { + assertNull(request.validate()); + } else { + assertNotNull(request.validate()); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); + } + request.onSnapshotBuild(true); assertNull(request.validate()); request.onSnapshotBuild(false); assertNotNull(request.validate()); - assertThat( - request.validate().getMessage(), - containsString( - "[version] with value [" - + esqlVersion - + "] only allowed in snapshot builds, latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]" - ) - ); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index aedc789620480..f4ecf38915a29 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1118,36 +1118,36 @@ public void testDateParseOnIntPattern() { public void testDateTruncOnInt() { verifyUnsupported(""" from test - | eval date_trunc("1M", int) - """, "first argument of [date_trunc(\"1M\", int)] must be [datetime], found value [int] type [integer]"); + | eval date_trunc(1 month, int) + """, "second argument of [date_trunc(1 month, int)] must be [datetime], found value [int] type [integer]"); } public void testDateTruncOnFloat() { verifyUnsupported(""" from test - | eval date_trunc("1M", float) - """, "first argument of [date_trunc(\"1M\", float)] must be [datetime], found value [float] type [double]"); + | eval date_trunc(1 month, float) + """, "second argument of [date_trunc(1 month, float)] must be [datetime], found value [float] type [double]"); } public void testDateTruncOnText() { verifyUnsupported(""" from test - | eval date_trunc("1M", keyword) - """, "first argument of [date_trunc(\"1M\", keyword)] must be [datetime], found value [keyword] type [keyword]"); + | eval date_trunc(1 month, keyword) + """, "second argument of [date_trunc(1 month, keyword)] must be [datetime], found value [keyword] type [keyword]"); } public void testDateTruncWithNumericInterval() { verifyUnsupported(""" from test | eval date_trunc(1, date) - """, "second argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); + """, "first argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); } public void testDateTruncWithDateInterval() { verifyUnsupported(""" from test | eval date_trunc(date, date) - """, "second argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]"); + """, "first argument of [date_trunc(date, date)] must be 
[dateperiod or timeduration], found value [date] type [datetime]"); } // check field declaration is validated even across duplicated declarations diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java new file mode 100644 index 0000000000000..f6e8b9107504c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class EnrichResultBuilderTests extends ESTestCase { + + public void testBytesRef() { + BlockFactory blockFactory = blockFactory(); + Map> expectedValues = new HashMap<>(); + int numPages = between(0, 10); + int maxPosition = between(0, 100); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0, maxPosition + 1); + for (int i = 0; i < numPages; i++) { + int numRows = between(1, 100); + try ( + var positionsBuilder = blockFactory.newIntVectorBuilder(numRows); + var valuesBuilder = blockFactory.newBytesRefBlockBuilder(numRows) + ) { + for (int r = 0; r < numRows; r++) { + int position = between(0, maxPosition); + positionsBuilder.appendInt(position); + int numValues = between(0, 3); + if (numValues == 0) { + valuesBuilder.appendNull(); + } + if (numValues > 1) { + valuesBuilder.beginPositionEntry(); + } + for (int v = 0; v < numValues; v++) { + BytesRef val = new BytesRef(randomByteArrayOfLength(10)); + expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); + valuesBuilder.appendBytesRef(val); + } + if (numValues > 1) { + valuesBuilder.endPositionEntry(); + } + } + try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) { + resultBuilder.addInputPage(positions, new Page(valuesBlock)); + } + } + } + try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build()) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v))); + } + } + } + } + resultBuilder.close(); + 
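+ // After build() and close() the builder should have released every block it allocated,
+ // so the mock circuit breaker is expected to report zero bytes still in use: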
assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testLong() { + BlockFactory blockFactory = blockFactory(); + Map> expectedValues = new HashMap<>(); + int numPages = between(0, 10); + int maxPosition = between(0, 100); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0, maxPosition + 1); + for (int i = 0; i < numPages; i++) { + int numRows = between(1, 100); + try ( + var positionsBuilder = blockFactory.newIntVectorBuilder(numRows); + var valuesBuilder = blockFactory.newLongBlockBuilder(numRows) + ) { + for (int r = 0; r < numRows; r++) { + int position = between(0, maxPosition); + positionsBuilder.appendInt(position); + int numValues = between(0, 3); + if (numValues == 0) { + valuesBuilder.appendNull(); + } + if (numValues > 1) { + valuesBuilder.beginPositionEntry(); + } + for (int v = 0; v < numValues; v++) { + long val = randomLong(); + expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); + valuesBuilder.appendLong(val); + } + if (numValues > 1) { + valuesBuilder.endPositionEntry(); + } + } + try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) { + resultBuilder.addInputPage(positions, new Page(valuesBlock)); + } + } + } + try (LongBlock actualOutput = (LongBlock) resultBuilder.build()) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getLong(first + v), equalTo(values.get(v))); + } + } + } + } + resultBuilder.close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + BlockFactory blockFactory() { + var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(100)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + return new BlockFactory(breaker, bigArrays); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java index 80d127fc81907..09bc36a5390af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java @@ -32,7 +32,6 @@ public void testSimple() throws Exception { CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = new BlockFactory(breaker, bigArrays); MergePositionsOperator mergeOperator = new MergePositionsOperator( - randomBoolean(), 7, 0, new int[] { 1, 2 }, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b97622f28520c..889dfbf4c9b17 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -214,7 +214,10 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) {
}
Layout.Builder builder = new Layout.Builder();
buildLayout(builder, e);
- assertTrue(e.resolved());
+ Expression.TypeResolution resolution = e.typeResolved();
+ if (resolution.unresolved()) {
+ throw new AssertionError("expected resolved " + resolution.message());
+ }
return EvalMapper.toEvaluator(e, builder.build());
}
@@ -242,18 +245,11 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe
}
public final void testEvaluate() {
- testEvaluate(false);
- }
-
- public final void testEvaluateFloating() {
- testEvaluate(true);
- }
-
- private void testEvaluate(boolean readFloating) {
assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable());
logger.info(
"Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(","))
);
+ boolean readFloating = randomBoolean();
Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase);
if (testCase.getExpectedTypeError() != null) {
assertTrue("expected unresolved", expression.typeResolved().unresolved());
@@ -263,7 +259,10 @@ private void testEvaluate(boolean readFloating) {
}
return;
}
- assertFalse("expected resolved", expression.typeResolved().unresolved());
+ Expression.TypeResolution resolution = expression.typeResolved();
+ if (resolution.unresolved()) {
+ throw new AssertionError("expected resolved " + resolution.message());
+ }
expression = new FoldNull().rule(expression);
assertThat(expression.dataType(), equalTo(testCase.expectedType()));
logger.info("Result type: " + expression.dataType());
@@ -296,47 +295,27 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) {
}
/**
- * Evaluates a {@link Block} of values, all copied from the input pattern, read directly from the page.
+ * Evaluates a {@link Block} of values, all copied from the input pattern.
*
<p>
* Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. *
</p>
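* <p>
* Whether values are read directly from the {@link Page} or through an intermediate operator is now
* chosen at random inside {@code testEvaluateBlock} (the {@code readFloating} flag), which is why the
* separate {@code *Floating} test variants are removed in this change.
* </p>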
*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read from an intermediate operator. - *
<p>
- * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
- */ - public final void testEvaluateBlockWithoutNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); } /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page. + * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator. - */ - public final void testEvaluateBlockWithNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } /** * Evaluates a {@link Block} of values, all copied from the input pattern, - * read directly from the {@link Page}, using the - * {@link CrankyCircuitBreakerService} which fails randomly. + * using the {@link CrankyCircuitBreakerService} which fails randomly. *
<p>
* Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. @@ -345,25 +324,7 @@ public final void testEvaluateBlockWithNullsFloating() { public final void testCrankyEvaluateBlockWithoutNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, - * read from an intermediate operator, using the - * {@link CrankyCircuitBreakerService} which fails randomly. - *
<p>
- * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
- */ - public final void testCrankyEvaluateBlockWithoutNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -371,27 +332,12 @@ public final void testCrankyEvaluateBlockWithoutNullsFloating() { /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page, - * using the {@link CrankyCircuitBreakerService} which fails randomly. + * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. */ public final void testCrankyEvaluateBlockWithNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator, - * using the {@link CrankyCircuitBreakerService} which fails randomly. - */ - public final void testCrankyEvaluateBlockWithNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -404,9 +350,10 @@ protected Matcher allNullsMatcher() { return nullValue(); } - private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls, boolean readFloating) { + private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); @@ -618,7 +565,7 @@ public static void testFunctionInfo() { for (Map.Entry, DataType> entry : signatures.entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { - typesFromSignature.get(i).add(types.get(i).esType()); + typesFromSignature.get(i).add(signatureType(types.get(i))); } returnFromSignature.add(entry.getValue().esType()); } @@ -637,6 +584,10 @@ public static void testFunctionInfo() { } + private static String signatureType(DataType type) { + return type.esType() != null ? type.esType() : type.typeName(); + } + /** * Adds cases with {@code null} and asserts that the result is {@code null}. *
<p>
@@ -651,6 +602,28 @@ public static void testFunctionInfo() { * on input types like {@link Greatest} or {@link Coalesce}. */ protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { + return anyNullIsNull( + testCaseSuppliers, + (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false + && nullValueDataType == DataTypes.NULL + && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), + (nullPosition, original) -> original + ); + } + + public interface ExpectedType { + DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); + } + + public interface ExpectedEvaluatorToString { + Matcher evaluatorToString(int nullPosition, Matcher original); + } + + protected static List anyNullIsNull( + List testCaseSuppliers, + ExpectedType expectedType, + ExpectedEvaluatorToString evaluatorToString + ) { typesRequired(testCaseSuppliers); List suppliers = new ArrayList<>(testCaseSuppliers.size()); suppliers.addAll(testCaseSuppliers); @@ -673,15 +646,12 @@ protected static List anyNullIsNull(boolean entirelyNullPreser TestCaseSupplier.TestCase oc = original.get(); List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { TestCaseSupplier.TypedData od = oc.getData().get(i); - if (i == finalNullPosition) { - return new TestCaseSupplier.TypedData(null, od.type(), od.name()); - } - return od; + return i == finalNullPosition ? od.forceValueToNull() : od; }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString(), - oc.expectedType(), + evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -704,7 +674,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser return new TestCaseSupplier.TestCase( data, equalTo("LiteralsEvaluator[lit=null]"), - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType(), + expectedType.expectedType(finalNullPosition, DataTypes.NULL, oc), nullValue(), null, oc.getExpectedTypeError(), @@ -810,9 +780,8 @@ private static Stream> allPermutations(int argumentCount) { if (argumentCount == 0) { return Stream.of(List.of()); } - if (argumentCount > 4) { - // TODO check for a limit 4. is arbitrary. 
- throw new IllegalArgumentException("would generate too many types"); + if (argumentCount > 3) { + throw new IllegalArgumentException("would generate too many combinations"); } Stream> stream = representable().map(t -> List.of(t)); for (int i = 1; i < argumentCount; i++) { @@ -894,6 +863,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List 0) { + builder.append(example.description()); + builder.append("\n"); + } builder.append(""" [source.merge.styled,esql] ---- diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index d600e51c07925..c064cfebd9cc5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1325,6 +1325,14 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Return a {@link TypedData} that always returns {@code null} for it's + * value without modifying anything else in the supplier. + */ + public TypedData forceValueToNull() { + return new TypedData(null, type, name, forceLiteral); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 4f897c47d73b8..98fbff6a816c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -7,28 +7,56 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.Rounding; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.DateEsField; -import org.elasticsearch.xpack.ql.type.EsField; import java.time.Duration; import java.time.Instant; import java.time.Period; -import java.util.Collections; -import java.util.Map; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.createRounding; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.process; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateTruncTests extends AbstractFunctionTestCase { -public class DateTruncTests extends ESTestCase { + public 
DateTruncTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + long ts = toMillis("2023-02-17T10:25:33.38Z"); + List suppliers = List.of( + ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z"), + ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z"), + ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z"), + ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z"), + // 7 days period should return weekly rounding + ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z"), + // 3 months period should return quarterly + ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z"), + ofDuration(Duration.ofHours(1), ts, "2023-02-17T10:00:00.00Z"), + ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z"), + ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z"), + ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z"), + ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z"), + ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z"), + randomSecond() + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } public void testCreateRoundingDuration() { Rounding.Prepared rounding; @@ -71,7 +99,7 @@ public void testCreateRoundingPeriod() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); e = expectThrows(IllegalArgumentException.class, () -> createRounding(Period.of(0, 1, 1))); - assertThat(e.getMessage(), containsString("Time interval is not supported")); + assertThat(e.getMessage(), containsString("Time interval with multiple periods is not supported")); rounding = createRounding(Period.ofDays(1)); assertEquals(1, rounding.roundingSize(Rounding.DateTimeUnit.DAY_OF_MONTH), 0d); @@ -103,25 +131,6 @@ public void testCreateRoundingNullInterval() { public void testDateTruncFunction() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - assertEquals(toMillis("2023-02-17T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(1)))); - assertEquals(toMillis("2023-02-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(1)))); - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofYears(1)))); - - assertEquals(toMillis("2023-02-12T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(10)))); - // 7 days period should return weekly rounding - assertEquals(toMillis("2023-02-13T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(7)))); - // 3 months period should return quarterly - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(3)))); - - assertEquals(toMillis("2023-02-17T10:00:00.00Z"), process(ts, createRounding(Duration.ofHours(1)))); - assertEquals(toMillis("2023-02-17T10:25:00.00Z"), process(ts, createRounding(Duration.ofMinutes(1)))); - assertEquals(toMillis("2023-02-17T10:25:33.00Z"), process(ts, createRounding(Duration.ofSeconds(1)))); - - assertEquals(toMillis("2023-02-17T09:00:00.00Z"), process(ts, createRounding(Duration.ofHours(3)))); - assertEquals(toMillis("2023-02-17T10:15:00.00Z"), process(ts, createRounding(Duration.ofMinutes(15)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Period.ofDays(-1)))); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); @@ -129,36 +138,71 @@ public void testDateTruncFunction() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); } - private static long toMillis(String timestamp) { - return Instant.parse(timestamp).toEpochMilli(); + private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.DATE_PERIOD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, EsqlDataTypes.DATE_PERIOD, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); + } + + private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(duration, EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); } - public void testSerialization() { - var dateTrunc = new DateTrunc(Source.EMPTY, randomDateIntervalLiteral(), randomDateField()); - SerializationTestUtils.assertSerialization(dateTrunc); + private static TestCaseSupplier randomSecond() { + return new TestCaseSupplier("random second", List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), () -> { + String dateFragment = randomIntBetween(2000, 2050) + + "-" + + pad(randomIntBetween(1, 12)) + + "-" + + pad(randomIntBetween(1, 28)) + + "T" + + pad(randomIntBetween(0, 23)) + + ":" + + pad(randomIntBetween(0, 59)) + + ":" + + pad(randomIntBetween(0, 59)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Duration.ofSeconds(1), EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(dateFragment + ".00Z")) + ); + }); } - private static FieldAttribute randomDateField() { - String fieldName = randomAlphaOfLength(randomIntBetween(1, 25)); - String dateName = randomAlphaOfLength(randomIntBetween(1, 25)); - boolean hasDocValues = randomBoolean(); - if (randomBoolean()) { - return new FieldAttribute(Source.EMPTY, fieldName, new EsField(dateName, DataTypes.DATETIME, Map.of(), hasDocValues)); - } else { - return new FieldAttribute(Source.EMPTY, fieldName, DateEsField.dateEsField(dateName, Collections.emptyMap(), hasDocValues)); - } + private static String pad(int i) { + return i > 9 ? 
"" + i : "0" + i; + } + + private static long toMillis(String timestamp) { + return Instant.parse(timestamp).toEpochMilli(); } - private static Literal randomDateIntervalLiteral() { - Duration duration = switch (randomInt(5)) { - case 0 -> Duration.ofNanos(randomIntBetween(1, 100000)); - case 1 -> Duration.ofMillis(randomIntBetween(1, 1000)); - case 2 -> Duration.ofSeconds(randomIntBetween(1, 1000)); - case 3 -> Duration.ofMinutes(randomIntBetween(1, 1000)); - case 4 -> Duration.ofHours(randomIntBetween(1, 100)); - case 5 -> Duration.ofDays(randomIntBetween(1, 60)); - default -> throw new AssertionError(); - }; - return new Literal(Source.EMPTY, duration, EsqlDataTypes.TIME_DURATION); + @Override + protected Expression build(Source source, List args) { + return new DateTrunc(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java index 013753c801c39..9d8cf702a375a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java @@ -13,126 +13,139 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; +import java.util.function.LongSupplier; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class AutoBucketTests extends AbstractScalarFunctionTestCase { +public class AutoBucketTests extends AbstractFunctionTestCase { public AutoBucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Autobucket Single date", () -> { - List args = List.of( - new TestCaseSupplier.TypedData( - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), - DataTypes.DATETIME, - "arg" - ) - ); - return new TestCaseSupplier.TestCase( - args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", - DataTypes.DATETIME, - dateResultsMatcher(args) - ); - }), new TestCaseSupplier("Autobucket Single long", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100L, DataTypes.LONG, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastLongToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single int", () 
-> { - List args = List.of(new TestCaseSupplier.TypedData(100, DataTypes.INTEGER, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastIntToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single double", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100.0, DataTypes.DOUBLE, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=Attribute[channel=0], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }))); + List suppliers = new ArrayList<>(); + dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); + numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100); + numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0); + // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL + ? DataTypes.NULL + : original.expectedType(), + (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + ) + ); } - private Expression build(Source source, Expression arg) { - Literal from; - Literal to; - if (arg.dataType() == DataTypes.DATETIME) { - from = stringOrDateTime("2023-02-01T00:00:00.00Z"); - to = stringOrDateTime("2023-03-01T09:00:00.00Z"); - } else { - from = new Literal(Source.EMPTY, 0, DataTypes.DOUBLE); - to = new Literal(Source.EMPTY, 1000, DataTypes.DOUBLE); - } - return new AutoBucket(source, arg, new Literal(Source.EMPTY, 50, DataTypes.INTEGER), from, to); - } + // TODO once we cast above the functions we can drop these + private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataTypes.DATETIME, DataTypes.KEYWORD, DataTypes.TEXT }; - private Literal stringOrDateTime(String date) { - if (randomBoolean()) { - return new Literal(Source.EMPTY, new BytesRef(date), randomBoolean() ? 
DataTypes.KEYWORD : DataTypes.TEXT); + private static void dateCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for (DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + DataTypes.DATETIME, + dateResultsMatcher(args) + ); + })); + } } - return new Literal(Source.EMPTY, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date), DataTypes.DATETIME); } - @Override - protected DataType expectedType(List argTypes) { - if (argTypes.get(0).isNumeric()) { - return DataTypes.DOUBLE; + private static TestCaseSupplier.TypedData dateBound(String name, DataType type, String date) { + Object value; + if (type == DataTypes.DATETIME) { + value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); + } else { + value = new BytesRef(date); } - return argTypes.get(0); + return new TestCaseSupplier.TypedData(value, type, name).forceLiteral(); } - private static Matcher dateResultsMatcher(List typedData) { - long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; + + private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { + for (DataType fromType : NUMBER_BOUNDS_TYPES) { + for (DataType toType : NUMBER_BOUNDS_TYPES) { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(number.get(), "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(numericBound("from", fromType, 0.0)); + args.add(numericBound("to", toType, 1000.0)); + // TODO more number types for "from" and "to" + String attr = "Attribute[channel=0]"; + if (numberType == DataTypes.INTEGER) { + attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; + } else if (numberType == DataTypes.LONG) { + attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; + } + return new TestCaseSupplier.TestCase( + args, + "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=" + + attr + + ", " + + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", + DataTypes.DOUBLE, + dateResultsMatcher(args) + ); + })); + } + } } - private static Matcher numericResultsMatcher(List typedData, Object value) { - return equalTo(value); + private static TestCaseSupplier.TypedData numericBound(String name, DataType type, double value) { + Number v; + if (type == DataTypes.INTEGER) { + v = (int) value; + } else if (type == DataTypes.LONG) { + v = (long) value; + } else { + v = value; + } + return new TestCaseSupplier.TypedData(v, 
type, name).forceLiteral(); } - @Override - protected List argSpec() { - DataType[] numerics = numerics(); - DataType[] all = new DataType[numerics.length + 1]; - all[0] = DataTypes.DATETIME; - System.arraycopy(numerics, 0, all, 1, numerics.length); - return List.of(required(all)); + private static Matcher dateResultsMatcher(List typedData) { + if (typedData.get(0).type() == DataTypes.DATETIME) { + long millis = ((Number) typedData.get(0).data()).longValue(); + return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + } + return equalTo(((Number) typedData.get(0).data()).doubleValue()); } @Override protected Expression build(Source source, List args) { - return build(source, args.get(0)); + return new AutoBucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); } @Override - protected Matcher badTypeError(List spec, int badArgPosition, DataType badArgType) { - return equalTo("first argument of [exp] must be [datetime or numeric], found value [arg0] type [" + badArgType.typeName() + "]"); + public void testSimpleWithNulls() { + assumeFalse("we test nulls in parameters", true); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java new file mode 100644 index 0000000000000..6e62af7e964f9 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.xpack.esql.expression.function.FunctionName;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
+import org.elasticsearch.xpack.ql.expression.Expression;
+import org.elasticsearch.xpack.ql.tree.Source;
+import org.elasticsearch.xpack.ql.type.DataType;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Supplier;
+
+@FunctionName("st_disjoint")
+public class SpatialDisjointTests extends SpatialRelatesFunctionTestCase {
+    public SpatialDisjointTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
+        this.testCase = testCaseSupplier.get();
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+        List<TestCaseSupplier> suppliers = new ArrayList<>();
+        DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE };
+        SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes);
+        DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE };
+        SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes);
+        return parameterSuppliersFromTypedData(
+            errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialDisjointTests::typeErrorMessage)
+        );
+    }
+
+    @Override
+    protected Expression build(Source source, List<Expression> args) {
+        return new SpatialDisjoint(source, args.get(0), args.get(1));
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java
new file mode 100644
index 0000000000000..b95f05039630a
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java
@@ -0,0 +1,228 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.string;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.operator.EvalOperator;
+import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+import org.elasticsearch.xpack.ql.expression.Expression;
+import org.elasticsearch.xpack.ql.expression.Literal;
+import org.elasticsearch.xpack.ql.tree.Source;
+import org.elasticsearch.xpack.ql.type.DataType;
+import org.elasticsearch.xpack.ql.type.DataTypes;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Supplier;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.elasticsearch.compute.data.BlockUtils.toJavaObject;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests for {@link Locate} function.
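+ * <p>
+ * A rough sketch of the semantics these tests pin down (the assertions below are the source of truth, not this
+ * note): positions are 1-based and counted in unicode code points rather than UTF-16 units or bytes, and a result
+ * of {@code 0} means the substring was not found. For example, locating {@code "ti"} in {@code "a tiger"} yields 3
+ * and locating {@code "ipa"} yields 0, which matches {@code 1 + str.indexOf(substr)} computed over code points.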
+ */ +public class LocateTests extends AbstractFunctionTestCase { + public LocateTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add( + supplier( + "keywords", + DataTypes.KEYWORD, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed keyword, text", + DataTypes.KEYWORD, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "texts", + DataTypes.TEXT, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed text, keyword", + DataTypes.TEXT, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + public void testToString() { + assertThat( + evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + field("start", DataTypes.INTEGER) + ) + ).get(driverContext()).toString(), + equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") + ); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); + } + + public void testPrefixString() { + assertThat(process("a tiger", "a t", 0), equalTo(1)); + assertThat(process("a tiger", "a", 0), equalTo(1)); + assertThat(process("界世", "界", 0), equalTo(1)); + } + + public void testSuffixString() { + assertThat(process("a tiger", "er", 0), equalTo(6)); + assertThat(process("a tiger", "r", 0), equalTo(7)); + assertThat(process("世界", "界", 0), equalTo(2)); + } + + public void testMidString() { + assertThat(process("a tiger", "ti", 0), equalTo(3)); + assertThat(process("a tiger", "ige", 0), equalTo(4)); + assertThat(process("世界世", "界", 0), equalTo(2)); + } + + public void testOutOfRange() { + assertThat(process("a tiger", "tigers", 0), equalTo(0)); + assertThat(process("a tiger", "ipa", 0), equalTo(0)); + assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); + } + + public void testExactString() { + assertThat(process("a tiger", "a tiger", 0), equalTo(1)); + assertThat(process("tigers", "tigers", 0), equalTo(1)); + assertThat(process("界世", "界世", 0), equalTo(1)); + } + + public void testSupplementaryCharacter() { + // some assertions about the supplementary (4-byte) character we'll use for testing + assert "𠜎".length() == 2; + assert "𠜎".codePointCount(0, 2) == 1; + assert "𠜎".getBytes(UTF_8).length == 4; + + assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); + assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); + assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); + assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); + assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); + + assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); + assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); + assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); + assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7)); + 
assertThat(process("𠜎a ti𠜎er", "r", 0), equalTo(8)); + + // exact + assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1)); + assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1)); + + // prefix + assertThat(process("𠜎abc", "𠜎", 0), equalTo(1)); + assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1)); + assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1)); + assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1)); + + // suffix + assertThat(process("abc𠜎", "𠜎", 0), equalTo(4)); + assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4)); + assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4)); + assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4)); + assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4)); + + // out of range + assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0)); + assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0)); + assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0)); + + assert "🐱".length() == 2 && "🐶".length() == 2; + assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; + assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; + assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1)); + assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2)); + assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3)); + } + + private Integer process(String str, String substr, Integer start) { + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + new Literal(Source.EMPTY, start, DataTypes.INTEGER) + ) + ).get(driverContext()); + Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) + ) { + return block.isNull(0) ? 
Integer.valueOf(0) : ((Integer) toJavaObject(block, 0)); + } + } + + private static TestCaseSupplier supplier( + String name, + DataType firstType, + DataType secondType, + Supplier strValueSupplier, + Supplier substrValueSupplier, + Supplier startSupplier + ) { + return new TestCaseSupplier(name, List.of(firstType, secondType), () -> { + List values = new ArrayList<>(); + String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; + + String value = strValueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0")); + + String substrValue = substrValueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1")); + + Integer startValue = startSupplier.get(); + values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2")); + + int expectedValue = 1 + value.indexOf(substrValue); + return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue)); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 4736ba2cc74d7..648fffada6dc1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.function.Supplier; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -161,6 +162,19 @@ public void testUnicode() { assert s.length() == 8 && s.codePointCount(0, s.length()) == 7; assertThat(process(s, 3, 1000), equalTo("tiger")); assertThat(process(s, -6, 1000), equalTo("\ud83c\udf09tiger")); + assert "🐱".length() == 2 && "🐶".length() == 2; + assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; + assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; + + for (Integer len : new Integer[] { null, 100, 100000 }) { + assertThat(process(s, 3, len), equalTo("tiger")); + assertThat(process(s, -6, len), equalTo("\ud83c\udf09tiger")); + + assertThat(process("🐱Meow!🐶Woof!", 0, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 1, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 2, len), equalTo("Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 3, len), equalTo("eow!🐶Woof!")); + } } public void testNegativeLength() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 63c2a33543073..a60999baba9fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import 
static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -156,6 +157,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -324,6 +326,52 @@ public void testCombineProjections() { var relation = as(limit.child(), EsRelation.class); } + /** + * Expects + * Project[[languages{f}#12 AS f2]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testCombineProjectionsWithEvalAndDrop() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1 + | keep f2 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("f2")); + assertThat(Expressions.name(Alias.unwrap(keep.projections().get(0))), is("languages")); + var limit = as(keep.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + + } + + /** + * Expects + * Project[[last_name{f}#26, languages{f}#25 AS f2, f4{r}#13]] + * \_Eval[[languages{f}#25 + 3[INTEGER] AS f4]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#28, emp_no{f}#22, first_name{f}#23, ..] + */ + public void testCombineProjectionsWithEval() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1, f3 = 1 + 2, f4 = f3 + languages + | keep emp_no, *name, salary, f* + | drop f3 + | keep last_name, f2, f4 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("last_name", "f2", "f4")); + var eval = as(keep.child(), Eval.class); + assertThat(Expressions.names(eval.fields()), contains("f4")); + var add = as(Alias.unwrap(eval.fields().get(0)), Add.class); + var limit = as(eval.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + } + public void testCombineProjectionWithFilterInBetween() { var plan = plan(""" from test @@ -366,6 +414,27 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[last_name{f}#23, first_name{f}#20, k{r}#4],[SUM(salary{f}#24) AS s, last_name{f}#23, first_name{f}#20, first_n + * ame{f}#20 AS k]] + * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] + */ + public void testCombineProjectionWithAggregationAndEval() { + var plan = plan(""" + from test + | eval k = first_name, k1 = k + | stats s = sum(salary) by last_name, first_name, k, k1 + | keep s, last_name, first_name, k + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.aggregates()), contains("s", "last_name", "first_name", "k")); + assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name", "k")); + } + /** * Expects * TopN[[Order[x{r}#10,ASC,LAST]],1000[INTEGER]] @@ -3765,12 +3834,11 @@ public void testNoWrongIsNotNullPruning() { * * For DISSECT expects the following; the others are similar. 
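 * (The {@code $$emp_no$temp_name$...} style attributes in the plans below are synthetic references that the
 * optimizer introduces so the sort expressions can be evaluated before DISSECT/GROK/ENRICH/EVAL overwrite the
 * original fields; the exact names shown here are illustrative of what the assertions check, not a stable API.)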
* - * EsqlProject[[first_name{f}#37, emp_no{r}#33, salary{r}#34]] - * \_TopN[[Order[$$emp_no$temp_name$36{r}#46 + $$salary$temp_name$41{r}#47 * 13[INTEGER],ASC,LAST], Order[NEG($$salary$t - * emp_name$41{r}#47),DESC,FIRST]],3[INTEGER]] - * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, parser=org.elasticsearch.dissect.Dissect - * Parser@b6858b],[emp_no{r}#33, salary{r}#34]] - * \_Eval[[emp_no{f}#36 AS $$emp_no$temp_name$36, salary{f}#41 AS $$salary$temp_name$41]] + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] */ public void testPushdownWithOverwrittenName() { @@ -3783,7 +3851,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepAfter = """ FROM test - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | KEEP first_name, emp_no, salary | LIMIT 3 @@ -3792,7 +3860,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepFirst = """ FROM test | KEEP emp_no, salary, first_name - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | LIMIT 3 """; @@ -3809,20 +3877,27 @@ public void testPushdownWithOverwrittenName() { assertThat(projections.get(2).name(), equalTo("salary")); var topN = as(project.child(), TopN.class); - assertThat(topN.order().size(), is(2)); + assertThat(topN.order().size(), is(3)); - var firstOrderExpr = as(topN.order().get(0), Order.class); - var mul = as(firstOrderExpr.child(), Mul.class); - var add = as(mul.left(), Add.class); - var renamed_emp_no = as(add.left(), ReferenceAttribute.class); - var renamed_salary = as(add.right(), ReferenceAttribute.class); + var firstOrder = as(topN.order().get(0), Order.class); + assertThat(firstOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamed_emp_no = as(firstOrder.child(), ReferenceAttribute.class); assertThat(renamed_emp_no.toString(), startsWith("$$emp_no$temp_name")); + + var secondOrder = as(topN.order().get(1), Order.class); + assertThat(secondOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_salary = as(secondOrder.child(), ReferenceAttribute.class); assertThat(renamed_salary.toString(), startsWith("$$salary$temp_name")); - var secondOrderExpr = as(topN.order().get(1), Order.class); - var neg = as(secondOrderExpr.child(), Neg.class); - var renamed_salary2 = as(neg.field(), ReferenceAttribute.class); - assert (renamed_salary2.semanticEquals(renamed_salary) && renamed_salary2.equals(renamed_salary)); + var thirdOrder = as(topN.order().get(2), Order.class); + assertThat(thirdOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + 
assertThat(thirdOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST));
+            var renamed_emp_no2 = as(thirdOrder.child(), ReferenceAttribute.class);
+            assertThat(renamed_emp_no2.toString(), startsWith("$$emp_no$temp_name"));
+
+            assert (renamed_emp_no2.semanticEquals(renamed_emp_no) && renamed_emp_no2.equals(renamed_emp_no));

             Eval renamingEval = null;
             if (overwritingCommand.startsWith("EVAL")) {
@@ -3846,8 +3921,210 @@ public void testPushdownWithOverwrittenName() {
             for (Alias field : renamingEval.fields()) {
                 attributesCreatedInEval.add(field.toAttribute());
             }
-            assert (attributesCreatedInEval.contains(renamed_emp_no));
-            assert (attributesCreatedInEval.contains(renamed_salary));
+            assertThat(attributesCreatedInEval, allOf(hasItem(renamed_emp_no), hasItem(renamed_salary), hasItem(renamed_emp_no2)));
+
+            assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands
+            // emp_no ASC nulls first
+            Alias empNoAsc = renamingEval.fields().get(0);
+            assertThat(empNoAsc.toAttribute(), equalTo(renamed_emp_no));
+            var emp_no = as(empNoAsc.child(), FieldAttribute.class);
+            assertThat(emp_no.name(), equalTo("emp_no"));
+
+            // salary DESC nulls last
+            Alias salaryDesc = renamingEval.fields().get(1);
+            assertThat(salaryDesc.toAttribute(), equalTo(renamed_salary));
+            var salary_desc = as(salaryDesc.child(), FieldAttribute.class);
+            assertThat(salary_desc.name(), equalTo("salary"));
+
+            assertThat(renamingEval.child(), instanceOf(EsRelation.class));
+        }
+    }
+
+    /**
+     * Expects
+     * Project[[min{r}#4, languages{f}#11]]
+     * \_TopN[[Order[$$order_by$temp_name$0{r}#18,ASC,LAST]],1000[INTEGER]]
+     * \_Eval[[min{r}#4 + languages{f}#11 AS $$order_by$temp_name$0]]
+     *   \_Aggregate[[languages{f}#11],[MIN(salary{f}#13) AS min, languages{f}#11]]
+     *     \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..]
+     */
+    public void testReplaceSortByExpressionsWithStats() {
+        var plan = optimizedPlan("""
+            from test
+            | stats min = min(salary) by languages
+            | sort min + languages
+            """);
+
+        var project = as(plan, Project.class);
+        assertThat(Expressions.names(project.projections()), contains("min", "languages"));
+        var topN = as(project.child(), TopN.class);
+        assertThat(topN.order().size(), is(1));
+
+        var order = as(topN.order().get(0), Order.class);
+        assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC));
+        assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST));
+        var expression = as(order.child(), ReferenceAttribute.class);
+        assertThat(expression.toString(), startsWith("$$order_by$0$"));
+
+        var eval = as(topN.child(), Eval.class);
+        var fields = eval.fields();
+        assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression)));
+        var aggregate = as(eval.child(), Aggregate.class);
+        var aggregates = aggregate.aggregates();
+        assertThat(Expressions.names(aggregates), contains("min", "languages"));
+        var unwrapped = Alias.unwrap(aggregates.get(0));
+        var min = as(unwrapped, Min.class);
+        as(aggregate.child(), EsRelation.class);
+    }
+
+    /**
+     * Expects
+     *
+     * Project[[salary{f}#19, languages{f}#17, emp_no{f}#14]]
+     * \_TopN[[Order[$$order_by$0$0{r}#24,ASC,LAST], Order[emp_no{f}#14,DESC,FIRST]],1000[INTEGER]]
+     * \_Eval[[salary{f}#19 / 10000[INTEGER] + languages{f}#17 AS $$order_by$0$0]]
+     *   \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..]
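+     * <p>
+     * Both SORT commands order by the same {@code salary/10000 + languages} expression and {@code d} is just an
+     * alias of {@code emp_no}, so the expected plan materializes the expression once as the synthetic attribute
+     * {@code $$order_by$0$0} and keeps only two orders in the TopN; the assertions below spell out that shape.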
+ */ + public void testReplaceSortByExpressionsMultipleSorts() { + var plan = optimizedPlan(""" + from test + | sort salary/10000 + languages, emp_no desc + | eval d = emp_no + | sort salary/10000 + languages, d desc + | keep salary, languages, emp_no + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("salary", "languages", "emp_no")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + ReferenceAttribute expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + order = as(topN.order().get(1), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + FieldAttribute empNo = as(order.child(), FieldAttribute.class); + assertThat(empNo.name(), equalTo("emp_no")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields.size(), equalTo(1)); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + Alias salaryAddLanguages = eval.fields().get(0); + var add = as(salaryAddLanguages.child(), Add.class); + var div = as(add.left(), Div.class); + var salary = as(div.left(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _10000 = as(div.right(), Literal.class); + assertThat(_10000.value(), equalTo(10000)); + var languages = as(add.right(), FieldAttribute.class); + assertThat(languages.name(), equalTo("languages")); + + as(eval.child(), EsRelation.class); + } + + /** + * For DISSECT expects the following; the others are similar. + * + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] + * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] + */ + public void testReplaceSortByExpressions() { + List overwritingCommands = List.of( + "EVAL emp_no = 3*emp_no, salary = -2*emp_no-salary", + "DISSECT first_name \"%{emp_no} %{salary}\"", + "GROK first_name \"%{WORD:emp_no} %{WORD:salary}\"", + "ENRICH languages_idx ON first_name WITH emp_no = language_code, salary = language_code" + ); + + String queryTemplateKeepAfter = """ + FROM test + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | KEEP first_name, emp_no, salary + | LIMIT 3 + """; + // Equivalent but with KEEP first - ensures that attributes in the final projection are correct after pushdown rules were applied. + String queryTemplateKeepFirst = """ + FROM test + | KEEP emp_no, salary, first_name + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | LIMIT 3 + """; + + for (String overwritingCommand : overwritingCommands) { + String queryTemplate = randomBoolean() ? 
queryTemplateKeepFirst : queryTemplateKeepAfter; + var plan = optimizedPlan(LoggerMessageFormat.format(null, queryTemplate, overwritingCommand)); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(projections.size(), equalTo(3)); + assertThat(projections.get(0).name(), equalTo("first_name")); + assertThat(projections.get(1).name(), equalTo("emp_no")); + assertThat(projections.get(2).name(), equalTo("salary")); + + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var firstOrderExpr = as(topN.order().get(0), Order.class); + assertThat(firstOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamedEmpNoSalaryExpression = as(firstOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedEmpNoSalaryExpression.toString(), startsWith("$$order_by$0$")); + + var secondOrderExpr = as(topN.order().get(1), Order.class); + assertThat(secondOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamedNegatedSalaryExpression = as(secondOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedNegatedSalaryExpression.toString(), startsWith("$$order_by$1$")); + + Eval renamingEval = null; + if (overwritingCommand.startsWith("EVAL")) { + // Multiple EVALs should be merged, so there's only one. + renamingEval = as(topN.child(), Eval.class); + } + if (overwritingCommand.startsWith("DISSECT")) { + var dissect = as(topN.child(), Dissect.class); + renamingEval = as(dissect.child(), Eval.class); + } + if (overwritingCommand.startsWith("GROK")) { + var grok = as(topN.child(), Grok.class); + renamingEval = as(grok.child(), Eval.class); + } + if (overwritingCommand.startsWith("ENRICH")) { + var enrich = as(topN.child(), Enrich.class); + renamingEval = as(enrich.child(), Eval.class); + } + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands + + // 13*(emp_no+salary) + Alias _13empNoSalary = renamingEval.fields().get(0); + assertThat(_13empNoSalary.toAttribute(), equalTo(renamedEmpNoSalaryExpression)); + var mul = as(_13empNoSalary.child(), Mul.class); + var add = as(mul.left(), Add.class); + var emp_no = as(add.left(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + var salary = as(add.right(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _13 = as(mul.right(), Literal.class); + assertThat(_13.value(), equalTo(13)); + + // -salary + Alias negatedSalary = renamingEval.fields().get(1); + assertThat(negatedSalary.toAttribute(), equalTo(renamedNegatedSalaryExpression)); + var neg = as(negatedSalary.child(), Neg.class); + assertThat(neg.field(), equalTo(salary)); assertThat(renamingEval.child(), instanceOf(EsRelation.class)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 180a8ff16f4eb..fb2362851e43c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -2933,6 +2934,7 @@ private record TestSpatialRelation(ShapeRelation relation, TestDataSource index, String function() { return switch (relation) { case INTERSECTS -> "ST_INTERSECTS"; + case DISJOINT -> "ST_DISJOINT"; case WITHIN -> "ST_WITHIN"; case CONTAINS -> "ST_CONTAINS"; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2942,6 +2944,7 @@ String function() { Class functionClass() { return switch (relation) { case INTERSECTS -> SpatialIntersects.class; + case DISJOINT -> SpatialDisjoint.class; case WITHIN -> literalRight ? SpatialWithin.class : SpatialContains.class; case CONTAINS -> literalRight ? SpatialContains.class : SpatialWithin.class; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2975,12 +2978,16 @@ public void testPushDownSpatialRelatesStringToSource() { TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), @@ -3027,10 +3034,16 @@ public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, 
true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index c2e4e2aa2ca98..b16bea7c65b5b 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -367,7 +367,7 @@ public List getRestHandlers( ) { return List.of( new RestGetGlobalCheckpointsAction(), - new RestFleetSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), + new RestFleetSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), new RestGetSecretsAction(), new RestPostSecretsAction(), diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java index 7bba867a74761..f4c26f5bcd094 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java @@ -19,7 +19,7 @@ public class GetSecretResponse extends ActionResponse implements ToXContentObject { - private String id; + private final String id; private final String value; public GetSecretResponse(StreamInput in) throws IOException { diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index 28cc7c5172631..530b64729a5d1 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -77,7 +77,6 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { final MultiSearchRequest multiSearchRequest = RestMultiSearchAction.parseRequest( request, - namedWriteableRegistry, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index e1281f4f20a4c..a6c369734f0e3 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -39,16 +38,10 @@ public class RestFleetSearchAction extends BaseRestHandler { private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestFleetSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestFleetSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -79,15 +72,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull(parser -> { - RestSearchAction.parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ); + RestSearchAction.parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder); String[] stringWaitForCheckpoints = request.paramAsStringArray("wait_for_checkpoints", Strings.EMPTY_ARRAY); final long[] waitForCheckpoints = new long[stringWaitForCheckpoints.length]; for (int i = 0; i < stringWaitForCheckpoints.length; ++i) { diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java index a7cd9c606b3c6..c0a5157557f58 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.XContentBuilder; @@ -16,20 +15,11 @@ public class SamlInitiateSingleSignOnResponse extends ActionResponse { - private String postUrl; - private String samlResponse; - private String entityId; - private String samlStatus; - private String error; - - public SamlInitiateSingleSignOnResponse(StreamInput in) throws IOException { - super(in); - this.entityId = in.readString(); - this.postUrl = in.readString(); - this.samlResponse = in.readString(); - this.samlStatus = in.readString(); - this.error = in.readOptionalString(); - } + private final String postUrl; + private final String samlResponse; + private final String entityId; + private final String samlStatus; + private final String error; public SamlInitiateSingleSignOnResponse( String entityId, diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java index 8e8a18f862bd7..6f1fb3cc32193 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java +++ 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,11 +16,6 @@ public class SamlMetadataResponse extends ActionResponse { private final String xmlString; - public SamlMetadataResponse(StreamInput in) throws IOException { - super(in); - this.xmlString = in.readString(); - } - public SamlMetadataResponse(String xmlString) { this.xmlString = Objects.requireNonNull(xmlString, "Metadata XML string must be provided"); } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java index e2b32c7e7023c..b09abb190ef7c 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,14 +20,6 @@ public class SamlValidateAuthnRequestResponse extends ActionResponse { private final boolean forceAuthn; private final Map authnState; - public SamlValidateAuthnRequestResponse(StreamInput in) throws IOException { - super(in); - this.spEntityId = in.readString(); - this.assertionConsumerService = in.readString(); - this.forceAuthn = in.readBoolean(); - this.authnState = in.readGenericMap(); - } - public SamlValidateAuthnRequestResponse(String spEntityId, String acs, boolean forceAuthn, Map authnState) { this.spEntityId = Objects.requireNonNull(spEntityId, "spEntityId is required for successful responses"); this.assertionConsumerService = Objects.requireNonNull(acs, "ACS is required for successful responses"); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 2aff5257a6ebf..fc3e46882ef84 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -191,7 +191,7 @@ private void createIndex(String index, String alias, boolean isTimeSeries) throw createIndexWithSettings(client(), index, alias, settings, mapping); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105437") + @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105437") public void testRollupIndex() throws Exception { createIndex(index, alias, true); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
similarity index 86%
rename from x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java
rename to x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
index ccda986a8d280..0f23e0b33d774 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
@@ -26,7 +26,7 @@
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.inference.InferencePlugin;
-import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 import org.elasticsearch.xpack.inference.services.elser.ElserInternalModel;
 import org.elasticsearch.xpack.inference.services.elser.ElserInternalService;
 import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettingsTests;
@@ -55,13 +55,13 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Mockito.mock;
 
-public class ModelRegistryImplIT extends ESSingleNodeTestCase {
+public class ModelRegistryIT extends ESSingleNodeTestCase {
 
-    private ModelRegistryImpl ModelRegistryImpl;
+    private ModelRegistry modelRegistry;
 
     @Before
     public void createComponents() {
-        ModelRegistryImpl = new ModelRegistryImpl(client());
+        modelRegistry = new ModelRegistry(client());
     }
 
     @Override
@@ -75,7 +75,7 @@ public void testStoreModel() throws Exception {
         AtomicReference storeModelHolder = new AtomicReference<>();
         AtomicReference exceptionHolder = new AtomicReference<>();
 
-        blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder);
 
         assertThat(storeModelHolder.get(), is(true));
         assertThat(exceptionHolder.get(), is(nullValue()));
@@ -87,7 +87,7 @@ public void testStoreModelWithUnknownFields() throws Exception {
         AtomicReference storeModelHolder = new AtomicReference<>();
         AtomicReference exceptionHolder = new AtomicReference<>();
 
-        blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder);
 
         assertNull(storeModelHolder.get());
         assertNotNull(exceptionHolder.get());
@@ -106,12 +106,12 @@ public void testGetModel() throws Exception {
         AtomicReference putModelHolder = new AtomicReference<>();
         AtomicReference exceptionHolder = new AtomicReference<>();
 
-        blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
         assertThat(putModelHolder.get(), is(true));
 
         // now get the model
-        AtomicReference modelHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder);
+        AtomicReference modelHolder = new AtomicReference<>();
+        blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder);
         assertThat(exceptionHolder.get(), is(nullValue()));
         assertThat(modelHolder.get(), not(nullValue()));
@@ -133,13 +133,13 @@ public void testStoreModelFailsWhenModelExists() throws Exception {
         AtomicReference putModelHolder = new AtomicReference<>();
         AtomicReference exceptionHolder = new AtomicReference<>();
 
-        blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
         assertThat(putModelHolder.get(), is(true));
         assertThat(exceptionHolder.get(), is(nullValue()));
 
         putModelHolder.set(false);
         // a model with the same id exists
-        blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
         assertThat(putModelHolder.get(), is(false));
         assertThat(exceptionHolder.get(), not(nullValue()));
         assertThat(
@@ -154,20 +154,20 @@ public void testDeleteModel() throws Exception {
             Model model = buildElserModelConfig(id, TaskType.SPARSE_EMBEDDING);
             AtomicReference putModelHolder = new AtomicReference<>();
             AtomicReference exceptionHolder = new AtomicReference<>();
-            blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+            blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
             assertThat(putModelHolder.get(), is(true));
         }
 
         AtomicReference deleteResponseHolder = new AtomicReference<>();
         AtomicReference exceptionHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder);
         assertThat(exceptionHolder.get(), is(nullValue()));
         assertTrue(deleteResponseHolder.get());
 
         // get should fail
         deleteResponseHolder.set(false);
-        AtomicReference modelHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder);
+        AtomicReference modelHolder = new AtomicReference<>();
+        blockingCall(listener -> modelRegistry.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder);
 
         assertThat(exceptionHolder.get(), not(nullValue()));
         assertFalse(deleteResponseHolder.get());
@@ -187,13 +187,13 @@ public void testGetModelsByTaskType() throws InterruptedException {
             AtomicReference putModelHolder = new AtomicReference<>();
             AtomicReference exceptionHolder = new AtomicReference<>();
 
-            blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+            blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
             assertThat(putModelHolder.get(), is(true));
         }
 
         AtomicReference exceptionHolder = new AtomicReference<>();
-        AtomicReference> modelHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder);
+        AtomicReference> modelHolder = new AtomicReference<>();
+        blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder);
         assertThat(modelHolder.get(), hasSize(3));
         var sparseIds = sparseAndTextEmbeddingModels.stream()
             .filter(m -> m.getConfigurations().getTaskType() == TaskType.SPARSE_EMBEDDING)
@@ -204,7 +204,7 @@ public void testGetModelsByTaskType() throws InterruptedException {
             assertThat(m.secrets().keySet(), empty());
         });
 
-        blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder);
         assertThat(modelHolder.get(), hasSize(2));
         var denseIds = sparseAndTextEmbeddingModels.stream()
             .filter(m -> m.getConfigurations().getTaskType() == TaskType.TEXT_EMBEDDING)
@@ -228,13 +228,13 @@ public void testGetAllModels() throws InterruptedException {
             var model = createModel(randomAlphaOfLength(5), randomFrom(TaskType.values()), service);
             createdModels.add(model);
 
-            blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder);
+            blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder);
             assertThat(putModelHolder.get(), is(true));
             assertNull(exceptionHolder.get());
         }
 
-        AtomicReference> modelHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.getAllModels(listener), modelHolder, exceptionHolder);
+        AtomicReference> modelHolder = new AtomicReference<>();
+        blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder);
         assertThat(modelHolder.get(), hasSize(modelCount));
         var getAllModels = modelHolder.get();
@@ -258,18 +258,18 @@ public void testGetModelWithSecrets() throws InterruptedException {
         AtomicReference exceptionHolder = new AtomicReference<>();
         var modelWithSecrets = createModelWithSecrets(inferenceEntityId, randomFrom(TaskType.values()), service, secret);
 
-        blockingCall(listener -> ModelRegistryImpl.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder);
         assertThat(putModelHolder.get(), is(true));
         assertNull(exceptionHolder.get());
 
-        AtomicReference modelHolder = new AtomicReference<>();
-        blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder);
+        AtomicReference modelHolder = new AtomicReference<>();
+        blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder);
         assertThat(modelHolder.get().secrets().keySet(), hasSize(1));
         var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings");
         assertThat(secretSettings.get("secret"), equalTo(secret));
 
         // get model without secrets
-        blockingCall(listener -> ModelRegistryImpl.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder);
+        blockingCall(listener -> modelRegistry.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder);
         assertThat(modelHolder.get().secrets().keySet(), empty());
     }
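[Editor's note] The tests above all go through a blockingCall(...) helper that is defined elsewhere in this test class and therefore not visible in the diff. A minimal sketch of the pattern it implements, assuming a CountDownLatch-based adapter; the signature and timeout are assumptions for illustration, not the exact helper:

    // Adapts a listener-based async call into a blocking one: the holders capture
    // either the response or the failure so the test thread can assert on them.
    private <T> void blockingCall(
        java.util.function.Consumer<ActionListener<T>> call,
        AtomicReference<T> responseHolder,
        AtomicReference<Exception> exceptionHolder
    ) throws InterruptedException {
        var latch = new java.util.concurrent.CountDownLatch(1);
        ActionListener<T> listener = ActionListener.wrap(r -> {
            responseHolder.set(r);   // success path: remember the value
            latch.countDown();
        }, e -> {
            exceptionHolder.set(e);  // failure path: remember the exception instead of throwing on another thread
            latch.countDown();
        });
        call.accept(listener);
        latch.await(30, java.util.concurrent.TimeUnit.SECONDS);
    }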
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
index c598a58d014f9..c707f99e7eb65 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
@@ -24,11 +24,8 @@
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.inference.InferenceServiceExtension;
 import org.elasticsearch.inference.InferenceServiceRegistry;
-import org.elasticsearch.inference.InferenceServiceRegistryImpl;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.ExtensiblePlugin;
-import org.elasticsearch.plugins.InferenceRegistryPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SystemIndexPlugin;
 import org.elasticsearch.rest.RestController;
@@ -53,7 +50,7 @@
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
 import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
-import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction;
 import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction;
 import org.elasticsearch.xpack.inference.rest.RestInferenceAction;
@@ -74,7 +71,7 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, InferenceRegistryPlugin {
+public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin {
 
     /**
      * When this setting is true the verification check that
@@ -99,8 +96,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP
     private final SetOnce serviceComponents = new SetOnce<>();
     private final SetOnce inferenceServiceRegistry = new SetOnce<>();
-    private final SetOnce modelRegistry = new SetOnce<>();
-
     private List inferenceServiceExtensions;
 
     public InferencePlugin(Settings settings) {
@@ -151,7 +146,7 @@ public Collection createComponents(PluginServices services) {
         );
         httpFactory.set(httpRequestSenderFactory);
 
-        ModelRegistry modelReg = new ModelRegistryImpl(services.client());
+        ModelRegistry modelRegistry = new ModelRegistry(services.client());
 
         if (inferenceServiceExtensions == null) {
             inferenceServiceExtensions = new ArrayList<>();
@@ -162,13 +157,11 @@ public Collection createComponents(PluginServices services) {
         var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext(services.client());
         // This must be done after the HttpRequestSenderFactory is created so that the services can get the
         // reference correctly
-        var inferenceRegistry = new InferenceServiceRegistryImpl(inferenceServices, factoryContext);
-        inferenceRegistry.init(services.client());
-        inferenceServiceRegistry.set(inferenceRegistry);
-        modelRegistry.set(modelReg);
+        var registry = new InferenceServiceRegistry(inferenceServices, factoryContext);
+        registry.init(services.client());
+        inferenceServiceRegistry.set(registry);
 
-        // Don't return components as they will be registered using InferenceRegistryPlugin methods to retrieve them
-        return List.of();
+        return List.of(modelRegistry, registry);
     }
 
     @Override
@@ -266,14 +259,4 @@ public void close() {
         IOUtils.closeWhileHandlingException(inferenceServiceRegistry.get(), throttlerToClose);
     }
-
-    @Override
-    public InferenceServiceRegistry getInferenceServiceRegistry() {
-        return inferenceServiceRegistry.get();
-    }
-
-    @Override
-    public ModelRegistry getModelRegistry() {
-        return modelRegistry.get();
-    }
 }
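[Editor's note] The effect of this change is that the registries are no longer fetched through plugin-specific getters; they are returned from createComponents() and picked up by type wherever they are needed. A small self-contained model of that wiring idea, with all names illustrative rather than Elasticsearch APIs:

    import java.util.List;

    // Components returned by a plugin factory method are registered in a container
    // by concrete type; consumers resolve them by type instead of calling getters.
    final class ComponentContainer {
        private final List<Object> components;

        ComponentContainer(List<Object> components) {
            this.components = components;
        }

        <T> T get(Class<T> type) {
            return components.stream().filter(type::isInstance).map(type::cast).findFirst().orElseThrow();
        }
    }

    final class ModelRegistry {}

    public class WiringSketch {
        public static void main(String[] args) {
            // before: the plugin stashed the registry in a SetOnce and exposed a getter;
            // after: it simply returns the instance from its createComponents() equivalent
            ComponentContainer container = new ComponentContainer(List.of(new ModelRegistry()));
            ModelRegistry registry = container.get(ModelRegistry.class);
            System.out.println("resolved " + registry.getClass().getSimpleName());
        }
    }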
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
index ad6042581f264..b55e2e6f8ebed 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
@@ -23,12 +23,12 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.inference.InferenceServiceRegistry;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 
 public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction {
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
index 0f7e48c4f8140..2de1aecea118c 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.inference.InferenceServiceRegistry;
 import org.elasticsearch.inference.ModelConfigurations;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
@@ -25,6 +24,7 @@
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction;
 import org.elasticsearch.xpack.inference.InferencePlugin;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 
 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
index a480763f33c47..edaf42d7f1fc6 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
@@ -16,11 +16,11 @@
 import org.elasticsearch.inference.InferenceService;
 import org.elasticsearch.inference.InferenceServiceRegistry;
 import org.elasticsearch.inference.Model;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.inference.action.InferenceAction;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 
 public class TransportInferenceAction extends HandledTransportAction {
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
index 556acfd89c9c6..85e8481f749d5 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.inference.InferenceServiceRegistry;
 import org.elasticsearch.inference.Model;
 import org.elasticsearch.inference.ModelConfigurations;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
@@ -44,6 +43,7 @@
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil;
 import org.elasticsearch.xpack.inference.InferencePlugin;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 
 import java.io.IOException;
 import java.util.Map;
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
similarity index 86%
rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java
rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
index 40921cd38f181..0f3aa5b82b189 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -32,7 +31,6 @@
 import org.elasticsearch.index.reindex.DeleteByQueryRequest;
 import org.elasticsearch.inference.Model;
 import org.elasticsearch.inference.ModelConfigurations;
-import org.elasticsearch.inference.ModelRegistry;
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
@@ -57,21 +55,49 @@
 import static org.elasticsearch.core.Strings.format;
 
-public class ModelRegistryImpl implements ModelRegistry {
+public class ModelRegistry {
 
     public record ModelConfigMap(Map config, Map secrets) {}
 
+    /**
+     * Semi-parsed model where the inference entity id, task type and service
+     * are known but the settings are not parsed.
+     */
+    public record UnparsedModel(
+        String inferenceEntityId,
+        TaskType taskType,
+        String service,
+        Map settings,
+        Map secrets
+    ) {
+
+        public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) {
+            if (modelConfigMap.config() == null) {
+                throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST);
+            }
+            String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID);
+            String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE);
+            String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME);
+            TaskType taskType = TaskType.fromString(taskTypeStr);
+
+            return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets());
+        }
+    }
+
     private static final String TASK_TYPE_FIELD = "task_type";
     private static final String MODEL_ID_FIELD = "model_id";
-    private static final Logger logger = LogManager.getLogger(ModelRegistryImpl.class);
+    private static final Logger logger = LogManager.getLogger(ModelRegistry.class);
 
     private final OriginSettingClient client;
 
-    @Inject
-    public ModelRegistryImpl(Client client) {
+    public ModelRegistry(Client client) {
         this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN);
     }
 
-    @Override
+    /**
+     * Get a model with its secret settings
+     * @param inferenceEntityId Model to get
+     * @param listener Model listener
+     */
     public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) {
         ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> {
             // There should be a hit for the configurations and secrets
@@ -80,7 +106,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) {
         ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> {
             // There should be a hit for the configurations and secrets
@@ -101,7 +132,7 @@ public void getModel(String inferenceEntityId, ActionListener lis
             return;
         }
 
-        var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList();
+        var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList();
         assert modelConfigs.size() == 1;
         delegate.onResponse(modelConfigs.get(0));
     });
@@ -116,7 +147,12 @@ public void getModel(String inferenceEntityId, ActionListener lis
         client.search(modelSearch, searchListener);
     }
 
-    @Override
+    /**
+     * Get all models of a particular task type.
+     * Secret settings are not included
+     * @param taskType The task type
+     * @param listener Models listener
+     */
     public void getModelsByTaskType(TaskType taskType, ActionListener> listener) {
         ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> {
             // Not an error if no models of this task_type
@@ -125,7 +161,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) {
         ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> {
             // Not an error if no models of this task_type
@@ -150,7 +190,7 @@ public void getAllModels(ActionListener> listener) {
             return;
         }
 
-        var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList();
+        var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList();
         delegate.onResponse(modelConfigs);
     });
@@ -217,7 +257,6 @@ private ModelConfigMap createModelConfigMap(SearchHits hits, String inferenceEnt
         );
     }
 
-    @Override
     public void storeModel(Model model, ActionListener listener) {
 
         ActionListener bulkResponseActionListener = getStoreModelListener(model, listener);
@@ -314,7 +353,6 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes
         return null;
     }
 
-    @Override
     public void deleteModel(String inferenceEntityId, ActionListener listener) {
         DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false);
         request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN);
@@ -339,16 +377,4 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T
     private QueryBuilder documentIdQuery(String inferenceEntityId) {
         return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId)));
     }
-
-    private static UnparsedModel unparsedModelFromMap(ModelRegistryImpl.ModelConfigMap modelConfigMap) {
-        if (modelConfigMap.config() == null) {
-            throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST);
-        }
-        String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID);
-        String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE);
-        String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME);
-        TaskType taskType = TaskType.fromString(taskTypeStr);
-
-        return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets());
-    }
 }
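[Editor's note] The new UnparsedModel record carries the raw config and secrets maps with only the id, task type and service resolved. A minimal usage sketch of the factory method above; the literal key strings ("model_id", "service", "task_type") are assumptions based on the constants visible in this diff, and the maps would normally come from the .inference and secrets index documents:

    // The config map must be mutable: unparsedModelFromMap removes the known keys,
    // leaving only the service-specific settings behind.
    Map<String, Object> config = new HashMap<>();
    config.put("model_id", "my-elser");
    config.put("service", "elser");
    config.put("task_type", "sparse_embedding");
    Map<String, Object> secrets = new HashMap<>();

    var unparsed = ModelRegistry.UnparsedModel.unparsedModelFromMap(new ModelRegistry.ModelConfigMap(config, secrets));
    assert unparsed.taskType() == TaskType.SPARSE_EMBEDDING;
    // unparsed.settings() now holds only the keys the named service still has to parse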
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java
similarity index 92%
rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java
rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java
index 10fd4f09e86ac..768f053295d13 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java
@@ -46,7 +46,7 @@
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-public class ModelRegistryImplTests extends ESTestCase {
+public class ModelRegistryTests extends ESTestCase {
 
     private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
 
@@ -66,9 +66,9 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned()
         var client = mockClient();
         mockClientExecuteSearch(client, mockSearchResponse(SearchHits.EMPTY));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModelWithSecrets("1", listener);
 
         ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT));
@@ -80,9 +80,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn
         var unknownIndexHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", "unknown_index"));
         mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit }));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModelWithSecrets("1", listener);
 
         IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT));
@@ -97,9 +97,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind
         var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference"));
         mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit }));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModelWithSecrets("1", listener);
 
         IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
@@ -114,9 +114,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind
         var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference"));
         mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit }));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModelWithSecrets("1", listener);
 
         IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
@@ -148,9 +148,9 @@ public void testGetModelWithSecrets() {
         mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit }));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModelWithSecrets("1", listener);
 
         var modelConfig = listener.actionGet(TIMEOUT);
@@ -177,9 +177,9 @@ public void testGetModelNoSecrets() {
         mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit }));
 
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
 
-        var listener = new PlainActionFuture();
+        var listener = new PlainActionFuture();
         registry.getModel("1", listener);
 
         registry.getModel("1", listener);
@@ -202,7 +202,7 @@ public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() {
         mockClientExecuteBulk(client, bulkResponse);
 
         var model = TestModel.createRandomInstance();
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
         var listener = new PlainActionFuture();
 
         registry.storeModel(model, listener);
@@ -219,7 +219,7 @@ public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() {
         mockClientExecuteBulk(client, bulkResponse);
 
         var model = TestModel.createRandomInstance();
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
         var listener = new PlainActionFuture();
 
         registry.storeModel(model, listener);
@@ -250,7 +250,7 @@ public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVe
         mockClientExecuteBulk(client, bulkResponse);
 
         var model = TestModel.createRandomInstance();
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
         var listener = new PlainActionFuture();
 
         registry.storeModel(model, listener);
@@ -276,7 +276,7 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() {
         mockClientExecuteBulk(client, bulkResponse);
 
         var model = TestModel.createRandomInstance();
-        var registry = new ModelRegistryImpl(client);
+        var registry = new ModelRegistry(client);
         var listener = new PlainActionFuture();
 
         registry.storeModel(model, listener);
diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java
index e0ce1f92b2a37..d30c249813cd2 100644
--- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java
+++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java
@@ -362,9 +362,10 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext
                 valuesSourceType,
                 (dv, n) -> {
                     throw new UnsupportedOperationException();
-                }
+                },
+                isIndexed()
             ).build(cache, breakerService);
-            return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new);
+            return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new, isIndexed());
         };
     }
 
diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java
index 0a312933768fb..2f936531f8c72 100644
--- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java
+++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java
@@ -17,13 +17,16 @@
 public class UnsignedLongIndexFieldData extends IndexNumericFieldData {
     private final IndexNumericFieldData signedLongIFD;
     protected final ToScriptFieldFactory toScriptFieldFactory;
+    protected final boolean indexed;
 
     UnsignedLongIndexFieldData(
         IndexNumericFieldData signedLongFieldData,
-        ToScriptFieldFactory toScriptFieldFactory
+        ToScriptFieldFactory toScriptFieldFactory,
+        boolean indexed
     ) {
         this.signedLongIFD = signedLongFieldData;
         this.toScriptFieldFactory = toScriptFieldFactory;
+        this.indexed = indexed;
     }
 
     @Override
@@ -51,6 +54,11 @@ protected boolean sortRequiresCustomComparator() {
         return false;
     }
 
+    @Override
+    protected boolean isIndexed() {
+        return indexed;
+    }
+
     @Override
     public NumericType getNumericType() {
         return NumericType.LONG;
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java
index 20ca6d8847d79..4fe3ed61114c3 100644
--- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java
+++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -35,7 +34,6 @@
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase {
 
@@ -46,7 +44,6 @@ public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase
     public void setUpMocks() {
         jobConfigProvider = new JobConfigProvider(client(), xContentRegistry());
         threadPool = mock(ThreadPool.class);
-        when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
     }
 
     public void testTriggerDeleteJobsInStateDeletingWithoutDeletionTask() throws InterruptedException {
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java
index 0a7cee96df145..30f84a97bcfb0 100644
--- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java
+++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java
@@ -39,13 +39,11 @@
 public class MlInitializationServiceIT extends MlNativeAutodetectIntegTestCase {
 
-    private ThreadPool threadPool;
     private MlInitializationService mlInitializationService;
 
     @Before
     public void setUpMocks() {
-        threadPool = mock(ThreadPool.class);
-        when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
+        final var threadPool = mock(ThreadPool.class);
         when(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
         MlDailyMaintenanceService mlDailyMaintenanceService = mock(MlDailyMaintenanceService.class);
         ClusterService clusterService = mock(ClusterService.class);
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
index cb270c7f19ae8..2c5485b8d467f 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java
@@ -730,7 +730,8 @@ public void testToXContent() throws IOException {
                       "all_in_bytes": 0,
                       "coordinating_rejections": 0,
                       "primary_rejections": 0,
-                      "replica_rejections": 0
+                      "replica_rejections": 0,
+                      "primary_document_rejections": 0
                     },
                     "limit_in_bytes": 0
                   }
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java
index d69178f158a88..fcdc116cab725 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java
@@ -7,6 +7,8 @@
 
 package org.elasticsearch.xpack.profiling;
 
+import org.elasticsearch.core.UpdateForV9;
+
 import java.util.Map;
 
 final class CO2Calculator {
@@ -52,6 +54,7 @@ public double getAnnualCO2Tons(String hostID, long samples) {
         return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host);
     }
 
+    @UpdateForV9 // only allow OTEL semantic conventions
     private double getKiloWattsPerCore(HostMetadata host) {
         return switch (host.hostArchitecture) {
             // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions,
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java
index 65b342abddd9d..c851b372cb2db 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
@@ -24,7 +25,9 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC
     private final int size;
     private final double samplingRate;
     private final long selfCPU;
+    @UpdateForV9 // remove this field - it is unused in Kibana
     private final long totalCPU;
+    @UpdateForV9 // remove this field - it is unused in Kibana
     private final long totalSamples;
     private final List> edges;
     private final List fileIds;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java
index 89c0b4ab6b0fb..4cad1104f783b 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.util.Collections;
@@ -29,8 +30,10 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX
     private final Map stackFrames;
     @Nullable
     private final Map executables;
+    @UpdateForV9 // remove this field - it is unused in Kibana
     @Nullable
     private final Map stackTraceEvents;
+    @UpdateForV9 // remove this field - it is unused in Kibana
     private final int totalFrames;
     private final double samplingRate;
     private final long totalSamples;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java
index e1e3e27e951bf..aae6615114f43 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.profiling;
 
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 
@@ -30,6 +31,7 @@ final class HostMetadata implements ToXContentObject {
         this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES;
     }
 
+    @UpdateForV9 // remove fallback to the "profiling.host.machine" field and remove it from the component template "profiling-hosts".
     public static HostMetadata fromSource(Map source) {
         if (source != null) {
             String hostID = (String) source.get("host.id");
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java
index 3aa0a79df13bc..d694ffd2cbebc 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.profiling;
 
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 
@@ -73,6 +74,7 @@ public static InstanceType fromHostSource(Map source) {
         return new InstanceType(provider, region, null);
     }
 
+    @UpdateForV9 // remove this method
     private static InstanceType fromObsoleteHostSource(Map source) {
         // Check and handle AWS.
        String region = (String) source.get("ec2.placement.region");
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java
index 0acdc7c37ce09..d7c9e61b73a3a 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java
@@ -149,8 +149,10 @@ public TransportGetStackTracesAction(
     }
 
     @Override
-    protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionListener submitListener) {
+    protected void doExecute(Task task, GetStackTracesRequest request, ActionListener submitListener) {
         licenseChecker.requireSupportedLicense();
+        assert task instanceof CancellableTask;
+        final CancellableTask submitTask = (CancellableTask) task;
         GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(request);
         Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), submitTask);
         if (request.isUserProvidedIndices()) {
@@ -161,7 +163,7 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL
     }
 
     private void searchProfilingEvents(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesRequest request,
         ActionListener submitListener,
@@ -201,7 +203,7 @@ private void searchProfilingEvents(
     }
 
     private void searchGenericEvents(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesRequest request,
         ActionListener submitListener,
@@ -240,7 +242,7 @@ private void searchGenericEvents(
     }
 
     private void searchGenericEventGroupedByStackTrace(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesRequest request,
         ActionListener submitListener,
@@ -320,7 +322,7 @@ private void searchGenericEventGroupedByStackTrace(
     }
 
     private void searchEventGroupedByStackTrace(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesRequest request,
         ActionListener submitListener,
@@ -432,7 +434,7 @@ The same stacktraces may come from different hosts (eventually from different da
     }
 
     private ActionListener handleEventsGroupedByStackTrace(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesResponseBuilder responseBuilder,
         ActionListener submitListener,
@@ -471,12 +473,12 @@ private static long getAggValueAsLong(SearchResponse searchResponse, String fiel
     }
 
     private void retrieveStackTraces(
-        Task submitTask,
+        CancellableTask submitTask,
         Client client,
         GetStackTracesResponseBuilder responseBuilder,
         ActionListener submitListener
     ) {
-        if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) {
+        if (submitTask.notifyIfCancelled(submitListener)) {
             return;
         }
         List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet());
@@ -554,7 +556,7 @@ static List> sliced(List c, int slices) {
 
     private class StackTraceHandler {
         private final AtomicInteger expectedResponses;
-        private final Task submitTask;
+        private final CancellableTask submitTask;
         private final ClusterState clusterState;
         private final Client client;
         private final GetStackTracesResponseBuilder responseBuilder;
@@ -568,7 +570,7 @@ private class StackTraceHandler {
         private final Map hostMetadata;
 
         private StackTraceHandler(
-            Task submitTask,
+            CancellableTask submitTask,
             ClusterState clusterState,
             Client client,
             GetStackTracesResponseBuilder responseBuilder,
@@ -691,7 +693,7 @@ public void mayFinish() {
     }
 
     private void retrieveStackTraceDetails(
-        Task submitTask,
+        CancellableTask submitTask,
         ClusterState clusterState,
         Client client,
         GetStackTracesResponseBuilder responseBuilder,
@@ -699,7 +701,7 @@ private void retrieveStackTraceDetails(
         List executableIds,
         ActionListener submitListener
     ) {
-        if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) {
+        if (submitTask.notifyIfCancelled(submitListener)) {
            return;
        }
        List stackFrameIndices = resolver.resolve(
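[Editor's note] The change above is a type-narrowing refactor: the runtime check happens once in doExecute, and every downstream signature then demands CancellableTask, removing the repeated instanceof checks. A self-contained sketch of the pattern, with illustrative names rather than the Elasticsearch classes:

    // Narrow the type once at the entry point, then let the stronger type flow through.
    interface Task {}

    interface CancellableTask extends Task {
        boolean notifyIfCancelled();
    }

    final class Handler {
        void execute(Task task) {
            assert task instanceof CancellableTask;
            CancellableTask cancellable = (CancellableTask) task; // single cast at the boundary
            step(cancellable);
        }

        private void step(CancellableTask task) {
            if (task.notifyIfCancelled()) {
                return; // bail out early; no instanceof needed at each step anymore
            }
            // ... do the actual work ...
        }
    }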
diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
index afbbe24979466..f30fd18443550 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
@@ -21,6 +21,7 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
 public class TopNFunctionTests extends ESTestCase {
+
     public void testToXContent() throws IOException {
         String fileID = "6tVKI4mSYDEJ-ABAIpYXcg";
         int frameType = 1;
@@ -54,8 +55,10 @@ public void testToXContent() throws IOException {
             .rawValue("2.2000")
             .field("total_annual_co2_tons")
             .rawValue("22.0000")
-            .field("self_annual_costs_usd", "12.0000")
-            .field("total_annual_costs_usd", "120.0000")
+            .field("self_annual_costs_usd")
+            .rawValue("12.0000")
+            .field("total_annual_costs_usd")
+            .rawValue("120.0000")
             .endObject();
 
         XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
index 1748c1be86b78..665548c432ca0 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
@@ -97,7 +97,7 @@ public List getRestHandlers(
         Predicate clusterSupportsFeature
     ) {
         return Arrays.asList(
-            new RestRollupSearchAction(namedWriteableRegistry, clusterSupportsFeature),
+            new RestRollupSearchAction(clusterSupportsFeature),
             new RestPutRollupJobAction(),
             new RestStartRollupJobAction(),
             new RestStopRollupJobAction(),
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
index 2e02f1d12fb69..a2e795d07aaf2 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java
@@ -8,7 +8,6 @@
 
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
@@ -28,11 +27,9 @@ public class RestRollupSearchAction extends BaseRestHandler {
 
     private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM);
 
-    private final NamedWriteableRegistry namedWriteableRegistry;
     private final Predicate clusterSupportsFeature;
 
-    public RestRollupSearchAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) {
-        this.namedWriteableRegistry = namedWriteableRegistry;
+    public RestRollupSearchAction(Predicate clusterSupportsFeature) {
         this.clusterSupportsFeature = clusterSupportsFeature;
     }
 
@@ -54,7 +51,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
                 searchRequest,
                 restRequest,
                 parser,
-                namedWriteableRegistry,
                 clusterSupportsFeature,
                 size -> searchRequest.source().size(size)
             )
diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java
index 0cf6cb93c865b..207df0faddd07 100644
--- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java
@@ -449,9 +449,9 @@ public Map getRepositories(
             (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) {
 
                 @Override
-                protected void assertSnapshotOrGenericThread() {
+                protected void assertSnapshotOrStatelessPermittedThreadPool() {
                     if (enabled.get()) {
-                        super.assertSnapshotOrGenericThread();
+                        super.assertSnapshotOrStatelessPermittedThreadPool();
                     }
                 }
 
diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java
index 6800dea01863a..4a15d00bc8168 100644
--- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java
@@ -248,7 +248,7 @@ public Map getRepositories(
             "test-fs",
             (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) {
                 @Override
-                protected void assertSnapshotOrGenericThread() {
+                protected void assertSnapshotOrStatelessPermittedThreadPool() {
                     // ignore
                 }
             }
diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java
index 9c36d7b762871..c54ead2bdbc45 100644
--- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java
@@ -609,7 +609,7 @@ private void testDirectories(
             ) {
 
                 @Override
-                protected void assertSnapshotOrGenericThread() {
+                protected void assertSnapshotOrStatelessPermittedThreadPool() {
                     // eliminate thread name check as we create repo manually on test/main threads
                 }
             };
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java
index f52102ded442b..817d5739b4b9f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java
@@ -10,7 +10,6 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.shrink.ResizeAction;
 import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
-import org.elasticsearch.action.admin.indices.shrink.ShrinkAction;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -78,7 +77,6 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() {
         } else {
             queries = null;
         }
-        final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME);
         IndicesAccessControl accessControl = new IndicesAccessControl(
             true,
             Collections.singletonMap(
@@ -94,7 +92,7 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() {
         ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService);
 
         PlainActionFuture plainActionFuture = new PlainActionFuture<>();
-        RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), action, null);
+        RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), ResizeAction.NAME, null);
         AuthorizationEngine mockEngine = mock(AuthorizationEngine.class);
         doAnswer(invocationOnMock -> {
             ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
@@ -126,7 +124,6 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions()
             .user(new User("john", "role"))
             .realmRef(new RealmRef("realm", "type", "node", null))
             .build();
-        final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME);
         IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.emptyMap());
         new SecurityContext(Settings.EMPTY, threadContext).putIndicesAccessControl(accessControl);
         ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService);
@@ -134,7 +131,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions()
         AuthorizationEngine mockEngine = mock(AuthorizationEngine.class);
         {
             PlainActionFuture plainActionFuture = new PlainActionFuture<>();
-            RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), action, null);
+            RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), ResizeAction.NAME, null);
             doAnswer(invocationOnMock -> {
                 ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
                 listener.onResponse(AuthorizationResult.deny());
@@ -159,7 +156,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions()
         // swap target and source for success
         {
             PlainActionFuture plainActionFuture = new PlainActionFuture<>();
-            RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), action, null);
+            RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), ResizeAction.NAME, null);
             doAnswer(invocationOnMock -> {
                 ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
                 listener.onResponse(AuthorizationResult.granted());
diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
index 62ae0d54eb584..b215a724c06f3 100644
--- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
+++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
@@ -20,7 +20,7 @@
  * Response for the sql action for translating SQL queries into ES requests
  */
 public class SqlTranslateResponse extends ActionResponse implements ToXContentObject {
-    private SearchSourceBuilder source;
+    private final SearchSourceBuilder source;
 
     public SqlTranslateResponse(StreamInput in) throws IOException {
         super(in);
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml
index 4a88762ddb9ea..db41e0d0efaa1 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml
@@ -432,6 +432,10 @@ setup:
 
 ---
 "Test frequent item sets unsupported types":
+
+  - skip:
+      version: "all"
+      reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215"
   - do:
       catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/
       search:
diff --git a/x-pack/plugin/transform/qa/common/build.gradle b/x-pack/plugin/transform/qa/common/build.gradle
new file mode 100644
index 0000000000000..9e7abfa2f977e
--- /dev/null
+++ b/x-pack/plugin/transform/qa/common/build.gradle
@@ -0,0 +1,8 @@
+apply plugin: 'elasticsearch.internal-java-rest-test'
+
+dependencies {
+  api project(':libs:elasticsearch-x-content')
+  api project(':test:framework')
+  api project(xpackModule('core'))
+}
+
diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java
new file mode 100644
index 0000000000000..486dd7c581032
--- /dev/null
+++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.transform.integration.common;
+
+import org.apache.logging.log4j.Level;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.xpack.core.transform.TransformField;
+import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+public abstract class TransformCommonRestTestCase extends ESRestTestCase {
+
+    protected static final String TRANSFORM_ENDPOINT = TransformField.REST_BASE_PATH_TRANSFORMS;
+    protected static final String AUTH_KEY = "Authorization";
+    protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization";
+
+    protected static String getTransformEndpoint() {
+        return TRANSFORM_ENDPOINT;
+    }
+
+    /**
+     * Returns the list of transform tasks as reported by the _tasks API.
+     */
+    @SuppressWarnings("unchecked")
+    protected List getTransformTasks() throws IOException {
+        Request tasksRequest = new Request("GET", "/_tasks");
+        tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*");
+        Map tasksResponse = entityAsMap(client().performRequest(tasksRequest));
+
+        Map nodes = (Map) tasksResponse.get("nodes");
+        if (nodes == null) {
+            return List.of();
+        }
+
+        List foundTasks = new ArrayList<>();
+        for (Map.Entry node : nodes.entrySet()) {
+            Map nodeInfo = (Map) node.getValue();
+            Map tasks = (Map) nodeInfo.get("tasks");
+            if (tasks != null) {
+                foundTasks.addAll(tasks.keySet());
+            }
+        }
+        return foundTasks;
+    }
+
+    /**
+     * Returns the list of transform tasks for the given transform as reported by the _cluster/state API.
+     */
+    @SuppressWarnings("unchecked")
+    protected List getTransformTasksFromClusterState(String transformId) throws IOException {
+        Request request = new Request("GET", "_cluster/state");
+        Map response = entityAsMap(adminClient().performRequest(request));
+
+        List> tasks = (List>) XContentMapValues.extractValue(
+            response,
+            "metadata",
+            "persistent_tasks",
+            "tasks"
+        );
+
+        return tasks.stream().map(t -> (String) t.get("id")).filter(transformId::equals).toList();
+    }
+
+    @SuppressWarnings("unchecked")
+    protected void logAudits() throws Exception {
+        logger.info("writing audit messages to the log");
+        Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true");
+        searchRequest.setJsonEntity("""
+            {
+              "size": 100,
+              "sort": [ { "timestamp": { "order": "asc" } } ]
+            }""");
+
+        assertBusy(() -> {
+            try {
+                refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN);
+                Response searchResponse = client().performRequest(searchRequest);
+
+                Map searchResult = entityAsMap(searchResponse);
+                List> searchHits = (List>) XContentMapValues.extractValue(
+                    "hits.hits",
+                    searchResult
+                );
+
+                for (Map hit : searchHits) {
+                    Map source = (Map) XContentMapValues.extractValue("_source", hit);
+                    String level = (String) source.getOrDefault("level", "info");
+                    logger.log(
+                        Level.getLevel(level.toUpperCase(Locale.ROOT)),
+                        "Transform audit: [{}] [{}] [{}] [{}]",
+                        Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)),
+                        source.getOrDefault("transform_id", "n/a"),
+                        source.getOrDefault("message", "n/a"),
+                        source.getOrDefault("node_name", "n/a")
+                    );
+                }
+            } catch (ResponseException e) {
+                // see gh#54810, wrap temporary 503's as assertion error for retry
+                if (e.getResponse().getStatusLine().getStatusCode() != 503) {
+                    throw e;
+                }
+                throw new AssertionError("Failed to retrieve audit logs", e);
+            }
+        }, 5, TimeUnit.SECONDS);
+    }
+
+    protected void refreshIndex(String index) throws IOException {
+        Request refreshRequest = new Request("POST", index + "/_refresh");
+        assertOK(adminClient().performRequest(refreshRequest));
+    }
+}
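[Editor's note] The new base class centralizes the task-inspection helpers that the TransformIT changes below switch over to. An illustrative subclass showing how they are meant to be combined; the class name and transform id are hypothetical, and the static hamcrest imports (hasSize, etc.) are assumed to be in scope as in the other transform tests:

    public class MyTransformIT extends TransformCommonRestTestCase {
        public void testTaskIsVisibleWhileRunning() throws Exception {
            // after putting and starting a transform elsewhere in the test:
            assertBusy(() -> assertThat(getTransformTasks(), hasSize(1)));
            // the persistent task in cluster state should agree with the _tasks API
            assertThat(getTransformTasksFromClusterState("my-transform-id"), hasSize(1));
        }
    }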
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle
index aab0d16e54f5c..32bb44850df6b 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle
+++ b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle
@@ -3,6 +3,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test'
 dependencies {
   javaRestTestImplementation(testArtifact(project(xpackModule('core'))))
   javaRestTestImplementation project(path: xpackModule('transform'))
+  javaRestTestImplementation project(path: xpackModule('transform:qa:common'))
 }

 // location for keys and certificates
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java
index 07b6bc9bd7770..27695eb080b0a 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java
+++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java
@@ -126,7 +126,7 @@ public void testLatest() throws Exception {
         waitUntilCheckpoint(transformConfig.getId(), 1L);
         stopTransform(transformConfig.getId());

-        refreshIndex(destIndexName, RequestOptions.DEFAULT);
+        refreshIndex(destIndexName);
         var mappings = getIndexMapping(destIndexName, RequestOptions.DEFAULT);
         assertThat(
             (Map<String, Object>) XContentMapValues.extractValue(destIndexName + ".mappings", mappings),
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java
index 538479c33b084..e7d54028caa20 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java
+++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.transform.TransformConfigVersion;
-import org.elasticsearch.xpack.core.transform.TransformField;
 import org.elasticsearch.xpack.core.transform.transforms.QueryConfig;
 import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig;
 import org.elasticsearch.xpack.core.transform.transforms.SyncConfig;
@@ -37,9 +36,7 @@
 import java.io.IOException;
 import java.time.Instant;
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -247,23 +244,23 @@ public void testTransformLifecycleInALoop() throws Exception {
             // Create the continuous transform
             putTransform(transformId, config, RequestOptions.DEFAULT);
             assertThat(getTransformTasks(), is(empty()));
-            assertThatTransformTaskDoesNotExist(transformId);
+            assertThat(getTransformTasksFromClusterState(transformId), is(empty()));

             startTransform(transformId, RequestOptions.DEFAULT);
             // There is 1 transform task after start
             assertThat(getTransformTasks(), hasSize(1));
-            assertThatTransformTaskExists(transformId);
+            assertThat(getTransformTasksFromClusterState(transformId), hasSize(1));

             Thread.sleep(sleepAfterStartMillis);
             // There should still be 1 transform task as the transform is continuous
             assertThat(getTransformTasks(), hasSize(1));
-            assertThatTransformTaskExists(transformId);
+            assertThat(getTransformTasksFromClusterState(transformId), hasSize(1));

             // Stop the transform with force set randomly
             stopTransform(transformId, true, null, false, force);
             // After the transform is stopped, there should be no transform task left
             assertThat(getTransformTasks(), is(empty()));
-            assertThatTransformTaskDoesNotExist(transformId);
+            assertThat(getTransformTasksFromClusterState(transformId), is(empty()));

             // Delete the transform
             deleteTransform(transformId);
@@ -303,63 +300,6 @@ private String createConfig(String transformId, String sourceIndex, String destI
         return Strings.toString(config);
     }

-    /**
-     * Returns the list of transform tasks as reported by _tasks API.
-     */
-    @SuppressWarnings("unchecked")
-    protected List<String> getTransformTasks() throws IOException {
-        final Request tasksRequest = new Request("GET", "/_tasks");
-        tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*");
-        final Map<String, Object> tasksResponse = entityAsMap(client().performRequest(tasksRequest));
-
-        Map<String, Object> nodes = (Map<String, Object>) tasksResponse.get("nodes");
-        if (nodes == null) {
-            return List.of();
-        }
-
-        List<String> foundTasks = new ArrayList<>();
-        for (Map.Entry<String, Object> node : nodes.entrySet()) {
-            Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue();
-            Map<String, Object> tasks = (Map<String, Object>) nodeInfo.get("tasks");
-            if (tasks != null) {
-                foundTasks.addAll(tasks.keySet());
-            }
-        }
-        return foundTasks;
-    }
-
-    /**
-     * Verifies that the given transform task exists in cluster state.
-     */
-    private void assertThatTransformTaskExists(String transformId) throws IOException {
-        assertThatTransformTaskCountIsEqualTo(transformId, 1);
-    }
-
-    /**
-     * Verifies that the given transform task does not exist in cluster state.
-     */
-    private void assertThatTransformTaskDoesNotExist(String transformId) throws IOException {
-        assertThatTransformTaskCountIsEqualTo(transformId, 0);
-    }
-
-    /**
-     * Verifies that the number of transform tasks in cluster state for the given transform is as expected.
-     */
-    @SuppressWarnings("unchecked")
-    private void assertThatTransformTaskCountIsEqualTo(String transformId, int expectedCount) throws IOException {
-        Request request = new Request("GET", "_cluster/state");
-        Map<String, Object> response = entityAsMap(adminClient().performRequest(request));
-
-        List<Map<String, Object>> tasks = (List<Map<String, Object>>) XContentMapValues.extractValue(
-            response,
-            "metadata",
-            "persistent_tasks",
-            "tasks"
-        );
-
-        assertThat("Tasks were: " + tasks, tasks.stream().filter(t -> transformId.equals(t.get("id"))).toList(), hasSize(expectedCount));
-    }
-
     public void testContinuousTransformUpdate() throws Exception {
         String indexName = "continuous-reviews-update";
         createReviewsIndex(indexName, 10, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow);
@@ -447,7 +387,7 @@ public void testContinuousTransformUpdate() throws Exception {
             assertOK(searchResponse);
             var responseMap = entityAsMap(searchResponse);
             assertThat((Integer) XContentMapValues.extractValue("hits.total.value", responseMap), greaterThan(0));
-            refreshIndex(dest, RequestOptions.DEFAULT);
+            refreshIndex(dest);
         }, 30, TimeUnit.SECONDS);

         stopTransform(config.getId());
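A note on the assertion change above: getTransformTasks reads allocated tasks from the _tasks API, while getTransformTasksFromClusterState reads persistent tasks from cluster state, and the two can disagree for a short window while a task is being allocated or removed. A test that needs both views to converge could poll them together, roughly like this (a sketch, not code from this change; the id is illustrative):

    // Sketch: poll until both task views agree that exactly one task exists.
    String transformId = "my-continuous-transform"; // illustrative
    assertBusy(() -> {
        assertThat(getTransformTasks(), hasSize(1));
        assertThat(getTransformTasksFromClusterState(transformId), hasSize(1));
    }, 30, TimeUnit.SECONDS);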
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
index 6e13e936f5532..eb1a1258d5a96 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
+++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
@@ -10,7 +10,6 @@
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
-import org.apache.logging.log4j.Level;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
@@ -27,7 +26,6 @@
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -40,22 +38,20 @@
 import org.elasticsearch.xpack.core.transform.transforms.QueryConfig;
 import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
 import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
-import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants;
 import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig;
 import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource;
 import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig;
 import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig;
 import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource;
+import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase;

 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.time.Instant;
 import java.time.ZoneId;
 import java.util.Base64;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -67,9 +63,8 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.core.Is.is;

-public abstract class TransformRestTestCase extends ESRestTestCase {
+public abstract class TransformRestTestCase extends TransformCommonRestTestCase {

-    protected static String TRANSFORM_ENDPOINT = "/_transform/";
     protected static final String AUTH_KEY = "Authorization";
     protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization";
@@ -81,49 +76,6 @@ protected void cleanUp() throws Exception {
         waitForPendingTasks();
     }

-    @SuppressWarnings("unchecked")
-    private void logAudits() throws Exception {
-        logger.info("writing audit messages to the log");
-        Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true");
-        searchRequest.setJsonEntity("""
-            {
-              "size": 100,
-              "sort": [ { "timestamp": { "order": "asc" } } ]
-            }""");
-
-        assertBusy(() -> {
-            try {
-                refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, RequestOptions.DEFAULT);
-                Response searchResponse = client().performRequest(searchRequest);
-
-                Map<String, Object> searchResult = entityAsMap(searchResponse);
-                List<Map<String, Object>> searchHits = (List<Map<String, Object>>) XContentMapValues.extractValue(
-                    "hits.hits",
-                    searchResult
-                );
-
-                for (Map<String, Object> hit : searchHits) {
-                    Map<String, Object> source = (Map<String, Object>) XContentMapValues.extractValue("_source", hit);
-                    String level = (String) source.getOrDefault("level", "info");
-                    logger.log(
-                        Level.getLevel(level.toUpperCase(Locale.ROOT)),
-                        "Transform audit: [{}] [{}] [{}] [{}]",
-                        Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)),
-                        source.getOrDefault("transform_id", "n/a"),
-                        source.getOrDefault("message", "n/a"),
-                        source.getOrDefault("node_name", "n/a")
-                    );
-                }
-            } catch (ResponseException e) {
-                // see gh#54810, wrap temporary 503's as assertion error for retry
-                if (e.getResponse().getStatusLine().getStatusCode() != 503) {
-                    throw e;
-                }
-                throw new AssertionError("Failed to retrieve audit logs", e);
-            }
-        }, 5, TimeUnit.SECONDS);
-    }
-
     protected void cleanUpTransforms() throws IOException {
         for (String id : createdTransformIds) {
             try {
@@ -140,12 +92,6 @@ protected void cleanUpTransforms() throws IOException {
         createdTransformIds.clear();
     }

-    protected void refreshIndex(String index, RequestOptions options) throws IOException {
-        var r = new Request("POST", index + "/_refresh");
-        r.setOptions(options);
-        assertOK(adminClient().performRequest(r));
-    }
-
     protected Map<String, Object> getIndexMapping(String index, RequestOptions options) throws IOException {
         var r = new Request("GET", "/" + index + "/_mapping");
         r.setOptions(options);
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java
index 2e509bedbce39..d17d9dbd20ffd 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java
+++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java
@@ -138,7 +138,7 @@ public void testPivotTransform() throws Exception {
         stopTransform(config.getId());
         assertBusy(() -> { assertEquals("stopped", getTransformState(config.getId())); });

-        refreshIndex(destIndexName, RequestOptions.DEFAULT);
+        refreshIndex(destIndexName);
         // Verify destination index mappings
         var mappings = (Map<String, Object>) XContentMapValues.extractValue(
             destIndexName + ".mappings",
@@ -235,7 +235,7 @@ public void testLatestTransform() throws Exception {
         stopTransform(configWithRuntimeFields.getId());
         assertBusy(() -> { assertEquals("stopped", getTransformState(configWithRuntimeFields.getId())); });

-        refreshIndex(destIndexName, RequestOptions.DEFAULT);
+        refreshIndex(destIndexName);
         // Verify destination index mappings
         var destIndexMapping = getIndexMapping(destIndexName, RequestOptions.DEFAULT);
diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java
index 5eac2bd2ebdf6..69c0e12ca4b55 100644
--- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java
+++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java
@@ -254,7 +254,7 @@ public void testContinuousEvents() throws Exception {
             source.append("\r\n");
             doBulk(source.toString(), false);
         }
-        refreshIndex(sourceIndexName, RequestOptions.DEFAULT);
+        refreshIndex(sourceIndexName);

         // start all transforms, wait until the processed all data and stop them
         startTransforms();
diff --git a/x-pack/plugin/transform/qa/single-node-tests/build.gradle b/x-pack/plugin/transform/qa/single-node-tests/build.gradle
index d4f84ecb37c9c..7eeb8c97d1ae4 100644
--- a/x-pack/plugin/transform/qa/single-node-tests/build.gradle
+++ b/x-pack/plugin/transform/qa/single-node-tests/build.gradle
@@ -4,6 +4,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test'
 dependencies {
   javaRestTestImplementation(testArtifact(project(xpackModule('core'))))
   javaRestTestImplementation project(path: xpackModule('transform'))
+  javaRestTestImplementation project(path: xpackModule('transform:qa:common'))
 }

 testClusters.configureEach {
diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
index 7c74e918a039f..09fbea29d4b15 100644
--- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
+++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java
@@ -9,33 +9,27 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
-import org.apache.logging.log4j.Level;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
-import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.transform.TransformField;
 import org.elasticsearch.xpack.core.transform.transforms.DestAlias;
 import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig;
-import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants;
+import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase;
 import org.junit.After;
 import org.junit.AfterClass;

 import java.io.IOException;
-import java.time.Instant;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
@@ -44,10 +38,9 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;

-public abstract class TransformRestTestCase extends ESRestTestCase {
+public abstract class TransformRestTestCase extends TransformCommonRestTestCase {

     protected static final String TEST_PASSWORD = "x-pack-test-password";
-    private static final String SECONDARY_AUTH_KEY = "es-secondary-authorization";
     protected static final SecureString TEST_PASSWORD_SECURE_STRING = new SecureString(TEST_PASSWORD.toCharArray());
     private static final String BASIC_AUTH_VALUE_SUPER_USER = basicAuthHeaderValue("x_pack_rest_user", TEST_PASSWORD_SECURE_STRING);
@@ -538,7 +531,7 @@ protected Request createRequestWithSecondaryAuth(
         RequestOptions.Builder options = request.getOptions().toBuilder();
         if (authHeader != null) {
-            options.addHeader("Authorization", authHeader);
+            options.addHeader(AUTH_KEY, authHeader);
         }
         if (secondaryAuthHeader != null) {
             options.addHeader(SECONDARY_AUTH_KEY, secondaryAuthHeader);
@@ -563,10 +556,6 @@ void waitForTransformCheckpoint(String transformId, long checkpoint) throws Exce
         }, 30, TimeUnit.SECONDS);
     }

-    void refreshIndex(String index) throws IOException {
-        assertOK(client().performRequest(new Request("POST", index + "/_refresh")));
-    }
-
     @SuppressWarnings("unchecked")
     protected static List<Map<String, Object>> getTransforms(List<Map<String, String>> expectedErrors) throws IOException {
         Request request = new Request("GET", getTransformEndpoint() + "_all");
@@ -688,73 +677,4 @@ protected void assertOneCount(String query, String field, int expected) throws I
         int actual = (Integer) ((List) XContentMapValues.extractValue(field, searchResult)).get(0);
         assertEquals(expected, actual);
     }
-
-    protected static String getTransformEndpoint() {
-        return TransformField.REST_BASE_PATH_TRANSFORMS;
-    }
-
-    @SuppressWarnings("unchecked")
-    private void logAudits() throws Exception {
-        logger.info("writing audit messages to the log");
-        Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true");
-        searchRequest.setJsonEntity("""
-            {
-              "size": 100,
-              "sort": [ { "timestamp": { "order": "asc" } } ]
-            }""");
-
-        assertBusy(() -> {
-            try {
-                refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN);
-                Response searchResponse = client().performRequest(searchRequest);
-
-                Map<String, Object> searchResult = entityAsMap(searchResponse);
-                List<Map<String, Object>> searchHits = (List<Map<String, Object>>) XContentMapValues.extractValue(
-                    "hits.hits",
-                    searchResult
-                );
-
-                for (Map<String, Object> hit : searchHits) {
-                    Map<String, Object> source = (Map<String, Object>) XContentMapValues.extractValue("_source", hit);
-                    String level = (String) source.getOrDefault("level", "info");
-                    logger.log(
-                        Level.getLevel(level.toUpperCase(Locale.ROOT)),
-                        "Transform audit: [{}] [{}] [{}] [{}]",
-                        Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)),
-                        source.getOrDefault("transform_id", "n/a"),
-                        source.getOrDefault("message", "n/a"),
-                        source.getOrDefault("node_name", "n/a")
-                    );
-                }
-            } catch (ResponseException e) {
-                // see gh#54810, wrap temporary 503's as assertion error for retry
-                if (e.getResponse().getStatusLine().getStatusCode() != 503) {
-                    throw e;
-                }
-                throw new AssertionError("Failed to retrieve audit logs", e);
-            }
-        }, 5, TimeUnit.SECONDS);
-    }
-
-    @SuppressWarnings("unchecked")
-    protected List<String> getTransformTasks() throws IOException {
-        final Request tasksRequest = new Request("GET", "/_tasks");
-        tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*");
-        Map<String, Object> tasksResponse = entityAsMap(client().performRequest(tasksRequest));
-
-        Map<String, Object> nodes = (Map<String, Object>) tasksResponse.get("nodes");
-        if (nodes == null) {
-            return List.of();
-        }
-
-        List<String> foundTasks = new ArrayList<>();
-        for (Map.Entry<String, Object> node : nodes.entrySet()) {
-            Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue();
-            Map<String, Object> tasks = (Map<String, Object>) nodeInfo.get("tasks");
-            if (tasks != null) {
-                foundTasks.addAll(tasks.keySet());
-            }
-        }
-        return foundTasks;
-    }
 }
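For reference, the AUTH_KEY/SECONDARY_AUTH_KEY constants that replace the string literals above are typically used together when a request carries both primary and secondary credentials, along the lines of this sketch (the user names are illustrative; basicAuthHeaderValue and TEST_PASSWORD_SECURE_STRING come from the test class shown above):

    // Sketch: attaching primary and secondary credentials to a REST request.
    Request request = new Request("GET", getTransformEndpoint() + "_all");
    RequestOptions.Builder options = request.getOptions().toBuilder();
    options.addHeader(AUTH_KEY, basicAuthHeaderValue("primary_user", TEST_PASSWORD_SECURE_STRING));
    options.addHeader(SECONDARY_AUTH_KEY, basicAuthHeaderValue("secondary_user", TEST_PASSWORD_SECURE_STRING));
    request.setOptions(options);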
diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java
index e537a6f280ac0..0f807fbae45d1 100644
--- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java
+++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java
@@ -95,10 +95,16 @@ public void testCreateAndDeleteTransformInALoop() throws IOException {
         try {
             // Create the batch transform
             createPivotReviewsTransform(transformId, destIndex, null);
+            assertThat(getTransformTasks(), is(empty()));
+            assertThat(getTransformTasksFromClusterState(transformId), is(empty()));
+
+            // Wait until the transform finishes
             startAndWaitForTransform(transformId, destIndex);
+
             // After the transform finishes, there should be no transform task left
             assertThat(getTransformTasks(), is(empty()));
+            assertThat(getTransformTasksFromClusterState(transformId), is(empty()));
+
             // Delete the transform
             deleteTransform(transformId);
         } catch (AssertionError | Exception e) {
diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java
index ba5b97bbcb062..66fbf2e892b56 100644
--- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java
+++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java
@@ -162,10 +162,14 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio
                     searchResponse.getShardFailures(),
                     searchResponse.getClusters()
                 );
-                tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory));
-                ensureOpen();
-                tileBuilder.build().writeTo(bytesOut);
-                return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes());
+                try {
+                    tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory));
+                    ensureOpen();
+                    tileBuilder.build().writeTo(bytesOut);
+                    return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes());
+                } finally {
+                    meta.decRef();
+                }
             }
         }
     });
diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java
index 341e92641f641..19f1133e4f14f 100644
--- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java
+++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java
@@ -11,9 +11,7 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.ObjectPath;
-import org.hamcrest.Matchers;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;

 import java.io.IOException;
@@ -21,6 +19,8 @@
 import java.util.Map;
 import java.util.concurrent.TimeUnit;

+import static org.hamcrest.Matchers.is;
+
 /**
  * Parent test class for Watcher (not-YAML) based REST tests
  */
@@ -36,7 +36,7 @@ public final void startWatcher() throws Exception {
             case "stopped":
                 Response startResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_start"));
                 boolean isAcknowledged = ObjectPath.createFromResponse(startResponse).evaluate("acknowledged");
-                Assert.assertThat(isAcknowledged, Matchers.is(true));
+                assertThat(isAcknowledged, is(true));
                 throw new AssertionError("waiting until stopped state reached started state");
             case "stopping":
                 throw new AssertionError("waiting until stopping state reached stopped state to start again");
@@ -68,7 +68,7 @@ public final void stopWatcher() throws Exception {
             case "started":
                 Response stopResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_stop"));
                 boolean isAcknowledged = ObjectPath.createFromResponse(stopResponse).evaluate("acknowledged");
-                Assert.assertThat(isAcknowledged, Matchers.is(true));
+                assertThat(isAcknowledged, is(true));
                 throw new AssertionError("waiting until started state reached stopped state");
             default:
                 throw new AssertionError("unknown state[" + state + "]");
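The watcher start/stop helpers above lean on an assertBusy idiom that is easy to miss: throwing an AssertionError inside the lambda marks the attempt as failed, so assertBusy retries it until the watcher stats report the target state or the timeout expires. Condensed to its skeleton (currentWatcherState() is a hypothetical stand-in for the watcher stats call):

    // Sketch of the retry idiom: fail on purpose until the target state is reached.
    assertBusy(() -> {
        switch (currentWatcherState()) {
            case "stopped":
                break; // reached the target state, assertBusy returns
            case "started":
                // issue the stop request here, then fail so assertBusy polls again
                throw new AssertionError("waiting until started state reached stopped state");
            default:
                throw new AssertionError("transient state, retrying");
        }
    }, 1, TimeUnit.MINUTES);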
diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java
index c7b1e0d0fcbee..ddcf976c84572 100644
--- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java
+++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java
@@ -13,16 +13,15 @@
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
 import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
-import org.hamcrest.Matchers;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;

+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;

-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptyMap;
 import static org.elasticsearch.xpack.watcher.WatcherRestTestCase.deleteAllWatcherData;
+import static org.hamcrest.Matchers.is;

 /**
  * Parent test class for Watcher YAML based REST tests
@@ -40,25 +39,25 @@ public static Iterable<Object[]> parameters() throws Exception {
     @Before
     public final void startWatcher() throws Exception {
         ESTestCase.assertBusy(() -> {
-            ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-            String state = (String) response.evaluate("stats.0.watcher_state");
+            ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+            String state = response.evaluate("stats.0.watcher_state");
             switch (state) {
                 case "stopped" -> {
                     ClientYamlTestResponse startResponse = getAdminExecutionContext().callApi(
                         "watcher.start",
-                        emptyMap(),
-                        emptyList(),
-                        emptyMap()
+                        Map.of(),
+                        List.of(),
+                        Map.of()
                     );
-                    boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged");
-                    Assert.assertThat(isAcknowledged, Matchers.is(true));
+                    boolean isAcknowledged = startResponse.evaluate("acknowledged");
+                    assertThat(isAcknowledged, is(true));
                     throw new AssertionError("waiting until stopped state reached started state");
                 }
                 case "stopping" -> throw new AssertionError("waiting until stopping state reached stopped state to start again");
                 case "starting" -> throw new AssertionError("waiting until starting state reached started state");
                 case "started" -> {
-                    int watcherCount = (int) response.evaluate("stats.0.watch_count");
+                    int watcherCount = response.evaluate("stats.0.watch_count");
                     if (watcherCount > 0) {
                         logger.info("expected 0 active watches, but got [{}], deleting watcher indices again", watcherCount);
                         deleteAllWatcherData();
@@ -73,8 +72,8 @@ public final void startWatcher() throws Exception {
     @After
     public final void stopWatcher() throws Exception {
         ESTestCase.assertBusy(() -> {
-            ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-            String state = (String) response.evaluate("stats.0.watcher_state");
+            ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+            String state = response.evaluate("stats.0.watcher_state");
             switch (state) {
                 case "stopped":
                     // all good here, we are done
@@ -84,14 +83,9 @@ public final void stopWatcher() throws Exception {
                 case "starting":
                     throw new AssertionError("waiting until starting state reached started state to stop");
"started": - ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi( - "watcher.stop", - emptyMap(), - emptyList(), - emptyMap() - ); - boolean isAcknowledged = (boolean) stopResponse.evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + boolean isAcknowledged = stopResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until started state reached stopped state"); default: throw new AssertionError("unknown state[" + state + "]"); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 7e16a0353f2cd..e77c7aba6824d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -141,7 +141,7 @@ public void postIndex(ShardId shardId, Engine.Index operation, Engine.IndexResul logger.debug("adding watch [{}] to trigger service", watch.id()); triggerService.add(watch); } else { - logger.debug("removing watch [{}] to trigger service", watch.id()); + logger.debug("removing watch [{}] from trigger service", watch.id()); triggerService.remove(watch.id()); } } else { @@ -179,7 +179,7 @@ public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { if (isWatchDocument(shardId.getIndexName())) { - logger.debug("removing watch [{}] to trigger service via delete", delete.id()); + logger.debug("removing watch [{}] from trigger service via delete", delete.id()); triggerService.remove(delete.id()); } return delete; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index f6e34ccb243c8..cd0e066de2eaf 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -44,7 +44,7 @@ public class WatcherLifeCycleService implements ClusterStateListener { private final AtomicReference state = new AtomicReference<>(WatcherState.STARTED); private final AtomicReference> previousShardRoutings = new AtomicReference<>(Collections.emptyList()); private volatile boolean shutDown = false; // indicates that the node has been shutdown and we should never start watcher after this. - private volatile WatcherService watcherService; + private final WatcherService watcherService; private final EnumSet stopStates = EnumSet.of(WatcherState.STOPPED, WatcherState.STOPPING); WatcherLifeCycleService(ClusterService clusterService, WatcherService watcherService) { @@ -123,7 +123,6 @@ public void clusterChanged(ClusterChangedEvent event) { } else { logger.info("watcher has not been stopped. 
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java
index f6e34ccb243c8..cd0e066de2eaf 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java
@@ -44,7 +44,7 @@ public class WatcherLifeCycleService implements ClusterStateListener {
     private final AtomicReference<WatcherState> state = new AtomicReference<>(WatcherState.STARTED);
     private final AtomicReference<List<ShardRouting>> previousShardRoutings = new AtomicReference<>(Collections.emptyList());
     private volatile boolean shutDown = false; // indicates that the node has been shutdown and we should never start watcher after this.
-    private volatile WatcherService watcherService;
+    private final WatcherService watcherService;
     private final EnumSet<WatcherState> stopStates = EnumSet.of(WatcherState.STOPPED, WatcherState.STOPPING);

     WatcherLifeCycleService(ClusterService clusterService, WatcherService watcherService) {
@@ -123,7 +123,6 @@ public void clusterChanged(ClusterChangedEvent event) {
                 } else {
                     logger.info("watcher has not been stopped. not currently in a stopping state, current state [{}]", state.get());
                 }
-
             });
         }
     }
diff --git a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java
index f8977f8d3cf8d..250920382719a 100644
--- a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java
+++ b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java
@@ -17,13 +17,10 @@
 import org.junit.Before;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;

-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.is;

 /** Runs rest tests against external cluster */
@@ -40,23 +37,23 @@ public static Iterable<Object[]> parameters() throws Exception {
     @Before
     public void startWatcher() throws Exception {
-        final List<String> watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
+        final List<String> watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap());
+                getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of());
                 for (String template : watcherTemplates) {
                     ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi(
                         "indices.exists_template",
-                        singletonMap("name", template),
-                        emptyList(),
-                        emptyMap()
+                        Map.of("name", template),
+                        List.of(),
+                        Map.of()
                     );
                     assertThat(templateExistsResponse.getStatusCode(), is(200));
                 }

-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("started"));
             } catch (IOException e) {
                 throw new AssertionError(e);
@@ -68,9 +65,9 @@ public void startWatcher() throws Exception {
     public void stopWatcher() throws Exception {
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap());
-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of());
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("stopped"));
             } catch (IOException e) {
                 throw new AssertionError(e);
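The dropped casts in these suites (and in WatcherYamlSuiteTestCase above) work because the test framework's evaluate method is generic, so the compiler infers the result type from the assignment target. A paraphrased sketch of the shape involved, not the framework's actual implementation:

    // Paraphrased: the result type T is inferred at the call site, which is why
    // `String state = evaluate(...)` needs no explicit cast.
    @SuppressWarnings("unchecked")
    static <T> T evaluate(Map<String, Object> response, String path) {
        return (T) XContentMapValues.extractValue(path, response);
    }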
diff --git a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java
index 10352b54912e5..a8e522f3836fb 100644
--- a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java
+++ b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java
@@ -17,13 +17,10 @@
 import org.junit.Before;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;

-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.is;

 /** Runs rest tests against external cluster */
@@ -40,23 +37,23 @@ public static Iterable<Object[]> parameters() throws Exception {
     @Before
     public void startWatcher() throws Exception {
-        final List<String> watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
+        final List<String> watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap());
+                getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of());
                 for (String template : watcherTemplates) {
                     ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi(
                         "indices.exists_template",
-                        singletonMap("name", template),
-                        emptyList(),
-                        emptyMap()
+                        Map.of("name", template),
+                        List.of(),
+                        Map.of()
                     );
                     assertThat(templateExistsResponse.getStatusCode(), is(200));
                 }

-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("started"));
             } catch (IOException e) {
                 throw new AssertionError(e);
@@ -68,9 +65,9 @@ public void startWatcher() throws Exception {
     public void stopWatcher() throws Exception {
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap());
-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of());
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("stopped"));
             } catch (IOException e) {
                 throw new AssertionError(e);
diff --git a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java
index 5c083cb90cd69..9cb64bab89d34 100644
--- a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java
+++ b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java
@@ -17,13 +17,10 @@
 import org.junit.Before;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;

-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.is;
 /** Runs rest tests against external cluster */
@@ -40,23 +37,23 @@ public static Iterable<Object[]> parameters() throws Exception {
     @Before
     public void startWatcher() throws Exception {
-        final List<String> watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
+        final List<String> watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap());
+                getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of());
                 for (String template : watcherTemplates) {
                     ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi(
                         "indices.exists_template",
-                        singletonMap("name", template),
-                        emptyList(),
-                        emptyMap()
+                        Map.of("name", template),
+                        List.of(),
+                        Map.of()
                     );
                     assertThat(templateExistsResponse.getStatusCode(), is(200));
                 }

-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("started"));
             } catch (IOException e) {
                 throw new AssertionError(e);
@@ -68,9 +65,9 @@ public void startWatcher() throws Exception {
     public void stopWatcher() throws Exception {
         assertBusy(() -> {
             try {
-                getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap());
-                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap());
-                String state = (String) response.evaluate("stats.0.watcher_state");
+                getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of());
+                ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
+                String state = response.evaluate("stats.0.watcher_state");
                 assertThat(state, is("stopped"));
             } catch (IOException e) {
                 throw new AssertionError(e);